def parse_args(argv):
  parser = argparse.ArgumentParser(
      prog='python -m %s' % __package__,
      description='Upload logs to google storage.')
  parser.add_argument('--dry-run', action='store_true', default=False,
                      help='Do not write anything.')
  parser.add_argument('--waterfall-url',
                      help='waterfall main URL. Usually http://localhost:XXXX')
  parser.add_argument('--master-name', required=True,
                      help='name of the master to query. e.g. "chromium"')
  parser.add_argument('--builder-name',
                      help='name of the builder to query. e.g. "Linux". '
                           'Must be under the specified master. If unspecified, '
                           'all builders are considered.')
  parser.add_argument('--bucket', default=None,
                      help='name of the bucket to use to upload logs, '
                           'optional.')
  parser.add_argument('--limit', default=10, type=int,
                      help='Maximum number of builds to upload in this run.')
  parser.add_argument('--nice', default=10, type=int,
                      help='Amount of niceness to add to this process and its '
                           'subprocesses.')
  infra_logs.add_argparse_options(parser)

  args = parser.parse_args(argv)
  # Strip the 'master.' prefix if present.
  if args.master_name.startswith('master.'):
    args.master_name = args.master_name[7:]
  return args
def parse_args(argv):
  p = argparse.ArgumentParser()
  p.add_argument(
      '--interval', default=10, type=int,
      help='time (in seconds) between sampling system metrics')
  p.add_argument(
      '--root-setup', action='store_true',
      help='if this is set sysmon will run once to initialise configs in /etc '
           'and then exit immediately. Used on GCE bots to bootstrap sysmon')

  logs.add_argparse_options(p)
  ts_mon.add_argparse_options(p)
  outer_loop.add_argparse_options(p)
  p.set_defaults(ts_mon_flush='manual')

  opts = p.parse_args(argv)

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return opts, loop_opts
def parse_args(args):  # pragma: no cover
  parser = argparse.ArgumentParser('./run.py %s' % __package__)
  parser.add_argument(
      '-c', '--configfile',
      help='Local JSON poller configuration file to override '
           'the config file from luci-config.')
  parser.add_argument(
      '-d', '--credentials_db',
      help='File to use for Codesite OAuth2 credentials storage.')
  parser.add_argument(
      '--datadir', default=DATADIR,
      help='Directory where persistent app data should be stored.')

  logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)
  parser.set_defaults(ts_mon_target_type='task',
                      ts_mon_task_service_name='bugdroid',
                      ts_mon_task_job_name='bugdroid_job')

  opts = parser.parse_args(args)
  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return opts, loop_opts
def parse_args(argv):
  if sys.platform == 'win32':
    default_state_directory = 'C:\\chrome-infra\\service-state'
    default_config_directory = 'C:\\chrome-infra\\service-config'
    default_root_directory = 'C:\\infra-python'
  else:
    default_state_directory = '/var/run/infra-services'
    default_config_directory = '/etc/infra-services'
    default_root_directory = '/opt/infra-python'

  p = argparse.ArgumentParser(
      description='Starts and stops machine-wide infra services with arguments '
                  'from config files')
  p.add_argument(
      '--state-directory', default=default_state_directory,
      help='directory to store PID files (default %(default)s)')
  p.add_argument(
      '--config-directory', default=default_config_directory,
      help='directory to read JSON config files (default %(default)s)')
  p.add_argument(
      '--root-directory', default=default_root_directory,
      help='directory where the service_manager package is deployed. If this '
           'package is updated the process will exit')
  p.add_argument(
      '--config-poll-interval', default=10,
      help='how frequently (in seconds) to poll the config directory')
  p.add_argument(
      '--service-poll-interval', default=10,
      help='how frequently (in seconds) to restart failed services')
  p.add_argument(
      '--root-setup', action='store_true',
      help='if this is set service_manager will run once to initialise configs '
           'in /etc and then exit immediately. Used on GCE bots to bootstrap '
           'service_manager')

  logs.add_argparse_options(p)
  ts_mon.add_argparse_options(p)
  p.set_defaults(
      ts_mon_target_type='task',
      ts_mon_task_service_name='service_manager',
      ts_mon_task_job_name=socket.getfqdn(),
  )

  opts = p.parse_args(argv)

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)

  return opts
def main(args):
  parser = argparse.ArgumentParser(prog='run.py %s' % __package__)
  parser.add_argument('data_url', action='store', nargs='*')
  parser.add_argument('--use-cache', action='store_true')
  parser.add_argument('--master-filter', action='store')
  parser.add_argument('--builder-filter', action='store')
  parser.add_argument('--processes', default=PARALLEL_TASKS, action='store',
                      type=int)
  parser.add_argument('--jobs', default=CONCURRENT_TASKS, action='store',
                      type=int)
  logs.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)

  gatekeeper_json = os.path.join(build_scripts_dir, 'slave', 'gatekeeper.json')
  parser.add_argument('--gatekeeper', action='store', default=gatekeeper_json)
  gatekeeper_trees_json = os.path.join(build_scripts_dir, 'slave',
                                       'gatekeeper_trees.json')
  parser.add_argument('--gatekeeper-trees', action='store',
                      default=gatekeeper_trees_json)

  parser.add_argument('--findit-api-url',
                      help='Query findit results from this url.')

  args = parser.parse_args(args)
  logs.process_argparse_options(args)
  loop_args = outer_loop.process_argparse_options(args)

  # Suppress all logging from connectionpool; it is too verbose at info level.
  if args.log_level != logging.DEBUG:
    class _ConnectionpoolFilter(object):
      @staticmethod
      def filter(record):
        if record.levelno == logging.INFO:
          return False
        return True

    logging.getLogger('requests.packages.urllib3.connectionpool').addFilter(
        _ConnectionpoolFilter())

  def outer_loop_iteration():
    return inner_loop(args)

  loop_results = outer_loop.loop(task=outer_loop_iteration,
                                 sleep_timeout=lambda: 5,
                                 **loop_args)

  return 0 if loop_results.success else 1
def add_argparse_options(self, parser):
  """Register any arguments used by this application.

  Override this method and call parser.add_argument().

  Args:
    parser: An argparse.ArgumentParser object.
  """
  logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)
def parse_args(argv):
  p = argparse.ArgumentParser()

  group = p.add_mutually_exclusive_group(required=True)
  group.add_argument(
      '--url', help='URL of one buildbot master to monitor')
  group.add_argument(
      '--build-dir',
      help='location of the tools/build directory. Used with --hostname to get '
           'the list of all buildbot masters on this host to monitor. Cannot be '
           'used with --url')

  p.add_argument(
      '--hostname', default=socket.getfqdn(),
      help='override local hostname (currently %(default)s). Used with '
           '--build-dir to get the list of all buildbot masters on this host to '
           'monitor')
  p.add_argument(
      '--interval', default=300, type=int,
      help='time (in seconds) between sampling the buildbot master')

  logs.add_argparse_options(p)
  ts_mon.add_argparse_options(p)
  outer_loop.add_argparse_options(p)

  DEFAULT_ARG_VALUE = '(default)'
  p.set_defaults(
      ts_mon_flush='manual',
      ts_mon_target_type='task',
      ts_mon_task_service_name='mastermon',
      ts_mon_task_job_name=DEFAULT_ARG_VALUE,
  )

  opts = p.parse_args(argv)

  if opts.ts_mon_task_job_name == DEFAULT_ARG_VALUE:
    # The ts_mon job name defaults to either the hostname when monitoring all
    # masters on a host, or the name of the master extracted from the URL.
    if opts.build_dir:
      opts.ts_mon_task_job_name = opts.hostname
    else:
      parsed_url = urlparse.urlsplit(opts.url)
      path_components = [x for x in parsed_url.path.split('/') if x]
      if path_components:
        opts.ts_mon_task_job_name = path_components[-1]
      else:
        opts.ts_mon_task_job_name = parsed_url.netloc

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return opts, loop_opts
def parse_args(argv):
  p = argparse.ArgumentParser()

  group = p.add_mutually_exclusive_group(required=True)
  group.add_argument(
      '--url', help='URL of one buildbot master to monitor')
  group.add_argument(
      '--build-dir',
      help='location of the tools/build directory. Used with --hostname to get '
           'the list of all buildbot masters on this host to monitor. Cannot be '
           'used with --url')

  p.add_argument(
      '--hostname', default=socket.getfqdn(),
      help='override local hostname (currently %(default)s). Used with '
           '--build-dir to get the list of all buildbot masters on this host to '
           'monitor')
  p.add_argument(
      '--interval', default=60, type=int,
      help='time (in seconds) between sampling the buildbot master')

  logs.add_argparse_options(p)
  ts_mon.add_argparse_options(p)
  outer_loop.add_argparse_options(p)

  DEFAULT_ARG_VALUE = '(default)'
  p.set_defaults(
      ts_mon_flush='manual',
      ts_mon_target_type='task',
      ts_mon_task_service_name='mastermon',
      ts_mon_task_job_name=DEFAULT_ARG_VALUE,
  )

  opts = p.parse_args(argv)

  if opts.ts_mon_task_job_name == DEFAULT_ARG_VALUE:
    # The ts_mon job name defaults to either the hostname when monitoring all
    # masters on a host, or the name of the master extracted from the URL.
    if opts.build_dir:
      opts.ts_mon_task_job_name = opts.hostname
    else:
      parsed_url = urlparse.urlsplit(opts.url)
      path_components = [x for x in parsed_url.path.split('/') if x]
      if path_components:
        opts.ts_mon_task_job_name = path_components[-1]
      else:
        opts.ts_mon_task_job_name = parsed_url.netloc

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return opts, loop_opts
def parse_args():  # pragma: no cover
  parser = argparse.ArgumentParser(
      description='Manage the state of a buildbot master. NOTE: Does nothing '
                  'unless --prod is specified')
  parser.add_argument('directory', nargs='?',
      help='location of the master to manage')
  parser.add_argument('desired_state', nargs='?',
      choices=buildbot_state.STATES['desired_buildbot_state'],
      help='the desired state of the master')
  parser.add_argument('transition_time_utc', nargs='?', type=float,
      help='seconds since the UTC epoch to trigger the state')
  parser.add_argument('--list-all-states', action='store_true',
      help='list all states with their actions and exit')
  parser.add_argument('--builder-filter', action='append', default=[],
      help='appends a Python regular expression to the list of builder '
           'filters. By default, all builders count as building; if builder '
           'filters are supplied, only builders that match at least one filter '
           'will be counted.')
  parser.add_argument('--drain-timeout', metavar='SECONDS', type=int,
      default=buildbot_state.DEFAULT_DRAIN_TIMEOUT_SEC,
      help='sets the drain state timeout, in seconds.')
  parser.add_argument('--enable-gclient-sync', action='store_true',
      help='perform a gclient sync before every master start')
  parser.add_argument('--emergency-file', default='.stop_master_lifecycle',
      help='filename of the emergency stop file. if this file is found in the '
           'master directory, exit immediately')
  parser.add_argument('--hostname', default=socket.getfqdn(),
      help='override local hostname (currently %(default)s)')
  parser.add_argument('--prod', action='store_true',
      help='actually run commands instead of printing them.')
  parser.add_argument('--loop', action='store_true',
      help='repeatedly run the state machine. will not terminate unless killed')
  parser.add_argument('--loop-sleep-secs', type=int, default=5,
      help='how many seconds to wait between loop runs. default %(default)s')
  parser.add_argument('--connection-timeout', type=int, default=30,
      help='how many seconds to wait for a master http request before timing '
           'out.')
  outer_loop.add_argparse_options(parser)
  logs.add_argparse_options(parser)

  args = parser.parse_args()
  logs.process_argparse_options(args)

  if not args.list_all_states:
    if not args.directory:
      parser.error('A master directory must be specified.')
    if not args.transition_time_utc:
      parser.error('A transition time must be specified.')
    if not args.desired_state:
      parser.error('A desired state must be specified.')

  return args
def parse_args():
  parser = argparse.ArgumentParser(
      description='Launches master_manager for every master on a host. NOTE: '
                  'does not perform any action unless --prod is set.')
  parser.add_argument('build_dir', nargs='?',
      help='location of the tools/build directory')
  parser.add_argument('--hostname', default=socket.getfqdn(),
      help='override local hostname (currently %(default)s)')
  parser.add_argument('--json-file',
      help='load desired master state from a file on disk')
  parser.add_argument('--json-gitiles',
      help='load desired master state from a gitiles location')
  parser.add_argument('--netrc',
      help='location of the netrc file when connecting to gitiles')
  parser.add_argument('--command-timeout',
      help='apply a timeout in seconds to each master_manager process')
  parser.add_argument('--verify', action='store_true',
      help='verify the desired master state JSON is valid, then exit')
  parser.add_argument('--prod', action='store_true',
      help='actually perform actions instead of doing a dry run')
  parser.add_argument('--processes', default=16, type=int,
      help='maximum number of master_manager processes to run simultaneously '
           '(default %(default)d)')
  ts_mon.add_argparse_options(parser)
  logs.add_argparse_options(parser)
  parser.set_defaults(
      ts_mon_target_type='task',
      ts_mon_task_job_name=socket.getfqdn().split(".")[0],
      ts_mon_task_service_name='master_manager_launcher',
      ts_mon_flush_mode='manual',
  )

  args = parser.parse_args()
  ts_mon.process_argparse_options(args)
  logs.process_argparse_options(args)

  if args.json_file and args.json_gitiles:
    parser.error("Can't specify --json-file and --json-gitiles simultaneously")

  if not args.json_gitiles and not args.json_file:
    parser.error('Must specify either --json-gitiles or --json-file.')

  if not args.verify:
    if not args.build_dir:
      parser.error('A build/ directory must be specified.')

  return args
def parse_args():
  parser = argparse.ArgumentParser(
      description='Launches master_manager for every master on a host. NOTE: '
                  'does not perform any action unless --prod is set.')
  parser.add_argument('build_dir', nargs='?',
      help='location of the tools/build directory')
  parser.add_argument('--hostname', default=socket.getfqdn(),
      help='override local hostname (currently %(default)s)')
  parser.add_argument('--json-file',
      help='load desired master state from a file on disk')
  parser.add_argument(
      '--json-gitiles',
      help='load desired master state from a gitiles location')
  parser.add_argument(
      '--netrc',
      help='location of the netrc file when connecting to gitiles')
  parser.add_argument(
      '--command-timeout',
      help='apply a timeout in seconds to each master_manager process')
  parser.add_argument(
      '--verify', action='store_true',
      help='verify the desired master state JSON is valid, then exit')
  parser.add_argument(
      '--prod', action='store_true',
      help='actually perform actions instead of doing a dry run')
  parser.add_argument(
      '--processes', default=16, type=int,
      help='maximum number of master_manager processes to run simultaneously '
           '(default %(default)d)')
  logs.add_argparse_options(parser)

  args = parser.parse_args()
  logs.process_argparse_options(args)

  if args.json_file and args.json_gitiles:
    parser.error(
        'Can\'t specify --json-file and --json-gitiles simultaneously')

  if not args.json_gitiles and not args.json_file:
    parser.error('Must specify either --json-gitiles or --json-file.')

  if not args.verify:
    if not args.build_dir:
      parser.error('A build/ directory must be specified.')

  return args
def add_argparse_options(self, parser):
  """Register any arguments used by this application.

  Override this method and call parser.add_argument().

  Args:
    parser: An argparse.ArgumentParser object.
  """
  if self.USES_STANDARD_LOGGING:
    logs.add_argparse_options(parser)
  if self.USES_TS_MON:
    ts_mon.add_argparse_options(parser)
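# A minimal usage sketch of the override pattern documented in the docstring
# above. It is hypothetical: the enclosing Application base class is implied by
# the two add_argparse_options methods shown here but not included in this
# section, and 'ExampleApp' and '--poll-interval' are made-up names for
# illustration only.
class ExampleApp(Application):  # 'Application' is the assumed base class
  USES_TS_MON = False  # opt out of ts_mon flag registration in the base class

  def add_argparse_options(self, parser):
    # Keep the flags registered by the base class, then add app-specific ones.
    super(ExampleApp, self).add_argparse_options(parser)
    parser.add_argument('--poll-interval', default=60, type=int,
                        help='seconds between polls (illustrative flag only)')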
def parse_args(args):  # pragma: no cover
  parser = argparse.ArgumentParser('python -m %s' % __package__)
  parser.add_argument('--dry_run', action='store_true',
                      help='Do not actually push anything.')
  parser.add_argument('--repo_dir', metavar='DIR', default='tag_pusher_repos',
                      help=('The directory to use for git clones '
                            '(default: %(default)s)'))
  parser.add_argument('--spec_json', metavar='SPEC', required=True,
                      help=('JSON file with configuration: '
                            '{<repo_url>: [{"refs" : [<ref>], "url": <url>}]}'))
  parser.add_argument('--json_output', metavar='PATH',
                      help='Path to write JSON with results of the run to')
  logs.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)
  opts = parser.parse_args(args)
  logs.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  # Read and validate the spec JSON.
  with open(opts.spec_json, 'r') as f:
    spec = json.load(f)
  if not isinstance(spec, dict):
    parser.error('Expecting dict as a spec')
  for repo_url, push_list in spec.iteritems():
    # Repo URL.
    parsed = urlparse.urlparse(repo_url)
    if parsed.scheme not in ('https', 'git', 'file'):
      parser.error('Repo URL must use https, git or file protocol.')
    if not parsed.path.strip('/'):
      parser.error('Repo URL is missing a path?')
    # Ref and URL to fetch.
    for d in push_list:
      refs = d.get('refs') or []
      url_to_read = d.get('url')
      for ref in refs:
        if not ref or not ref.startswith('refs/'):
          parser.error('Ref to push should start with refs/')
      if not url_to_read or not url_to_read.startswith('https://'):
        parser.error('URL to read SHA1 from should use https')

  # git2.Repo -> [([ref_to_push], url_to_read)].
  spec_by_repo = {}
  for url, push_list in spec.iteritems():
    repo = git2.Repo(url)
    repo.dry_run = opts.dry_run
    repo.repos_dir = opts.repo_dir
    spec_by_repo[repo] = [(d['refs'], d['url']) for d in push_list]

  return Options(spec_by_repo, loop_opts, opts.json_output)
def parse_args(args):  # pragma: no cover
  def check_url(s):
    parsed = urlparse.urlparse(s)
    if parsed.scheme not in ('https', 'git', 'file'):
      raise argparse.ArgumentTypeError(
          'Repo URL must use https, git or file protocol.')
    if not parsed.path.strip('/'):
      raise argparse.ArgumentTypeError('URL is missing a path?')
    return git2.Repo(s)

  parser = argparse.ArgumentParser('./run.py %s' % __package__)
  parser.add_argument('--dry_run', action='store_true',
                      help='Do not actually push anything.')
  parser.add_argument('--repo_dir', metavar='DIR', default='gsubtreed_repos',
                      help=('The directory to use for git clones '
                            '(default: %(default)s)'))
  parser.add_argument('--json_output', metavar='PATH',
                      help='Path to write JSON with results of the run to')
  parser.add_argument('repo', nargs=1, help='The url of the repo to act on.',
                      type=check_url)

  logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)
  parser.set_defaults(
      ts_mon_target_type='task',
      ts_mon_task_service_name='gsubtreed',
  )

  opts = parser.parse_args(args)

  repo = opts.repo[0]
  repo.repos_dir = os.path.abspath(opts.repo_dir)

  if not opts.ts_mon_task_job_name:
    parsed_repo_url = urlparse.urlparse(repo.url)
    opts.ts_mon_task_job_name = '%s%s' % (parsed_repo_url.netloc,
                                          parsed_repo_url.path)

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return Options(repo, loop_opts, opts.json_output, opts.dry_run)
def parse_args(argv):
  p = argparse.ArgumentParser(
      description='Starts and stops machine-wide infra services with arguments '
                  'from config files')
  p.add_argument(
      '--state-directory', default='/var/run/infra-services',
      help='directory to store PID files (default %(default)s)')
  p.add_argument(
      '--config-directory', default='/etc/infra-services',
      help='directory to read JSON config files (default %(default)s)')
  p.add_argument(
      '--root-directory', default='/opt/infra-python',
      help='directory where the service_manager package is deployed. If this '
           'package is updated the process will exit')
  p.add_argument(
      '--config-poll-interval', default=10,
      help='how frequently (in seconds) to poll the config directory')
  p.add_argument(
      '--service-poll-interval', default=10,
      help='how frequently (in seconds) to restart failed services')
  p.add_argument(
      '--root-setup', action='store_true',
      help='if this is set service_manager will run once to initialise configs '
           'in /etc and then exit immediately. Used on GCE bots to bootstrap '
           'service_manager')

  logs.add_argparse_options(p)
  ts_mon.add_argparse_options(p)
  p.set_defaults(
      ts_mon_target_type='task',
      ts_mon_task_service_name='service_manager',
      ts_mon_task_job_name=socket.getfqdn(),
  )

  opts = p.parse_args(argv)

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)

  return opts
def main(argv):
  p = argparse.ArgumentParser()
  p.add_argument('--dry-run', action='store_true',
                 help='Make plan but do nothing.')
  p.add_argument('--reference', metavar='DIR',
                 help='Path to a repo to use for reference.')
  p.add_argument('--repo-dir', metavar='DIR', default='bootstrap_from_existing',
                 help=('The directory to use for git clones '
                       '(default: %(default)s)'))
  p.add_argument('subpath', nargs='*', help='subpaths to mirror from')
  logs.add_argparse_options(p)
  opts = p.parse_args(argv)
  logs.process_argparse_options(opts)

  # TODO(iannucci): make this work for other refs?
  repo = Repo(CHROMIUM_URL)
  repo.repos_dir = os.path.abspath(opts.repo_dir)
  repo.reify(share_from=opts.reference)
  repo.run('fetch')

  plan = []
  for path in opts.subpath:
    plan.extend(process_path(repo, path))

  print 'Plan of attack: '
  for task in plan:
    print ' Note ', '%r: %s mirrored from %s' % task
  print

  prompt = 'yes'.startswith(raw_input('Continue? [y/N] ').lower() or 'no')
  if opts.dry_run or not prompt:
    print 'Doing nothing'
    return 0

  for sub_url, obj, matched in plan:
    remote_name = hashlib.sha1(sub_url).hexdigest()
    notes_ref = 'refs/remotes/%s/notes/extra_footers' % remote_name
    repo.run('notes', '--ref', notes_ref, 'add', '-m',
             '%s: %s' % (MIRRORED_COMMIT, matched), obj)
    repo.run('push', remote_name, '%s:refs/notes/extra_footers' % (notes_ref,))

  return 0
def main(args):
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '-n', '--nice', type=int, metavar='VALUE',
      help='Set the nice level of the process to VALUE prior to execution.')
  parser.add_argument(
      'master_paths', nargs='+',
      help='The paths to the master base directories to monitor. Consider '
           'the /path/to/build/masters/master.* wildcard to specify all of '
           'them.')
  logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)

  # Parse arguments.
  args = parser.parse_args(args)
  logs.process_argparse_options(args)
  ts_mon.process_argparse_options(args)

  # Try setting the nice value; if it fails, eat the error and continue.
  if args.nice:
    logging.debug('Setting process "nice" to: %d', args.nice)
    try:
      os.nice(args.nice)
    except OSError as e:
      logging.error('Failed to update "nice" to %d: %s', args.nice, e)

  # Update global state calculations.
  logging.info('Pulling master state from: %s', args.master_paths)
  for master_path in args.master_paths:
    master_name = master_path_to_name(master_path)

    # Log to the target: buildbot/master/<master_name>
    target = ts_mon.TaskTarget('buildbot/master', master_name,
                               args.ts_mon_task_region,
                               args.ts_mon_task_hostname,
                               args.ts_mon_task_number)
    logging.info('Collecting log state for master "%s" at: %s',
                 master_name, master_path)
    get_master_state(master_path, target)

  logging.info('Flushing collected information.')
  ts_mon.flush()
  return 0
def parse_args(args):  # pragma: no cover
  parser = argparse.ArgumentParser('./run.py %s' % __package__)
  parser.add_argument(
      '-c', '--configfile',
      help='Local JSON poller configuration file to override '
           'config file from luci-config.')
  parser.add_argument(
      '-d', '--credentials_db',
      help='File to use for OAuth2 credentials storage if not running on LUCI.')
  parser.add_argument(
      '--datadir', default=DATADIR,
      help='Directory where persistent app data should be stored.')
  parser.add_argument(
      '--dryrun', action='store_true',
      help='Don\'t update monorail issues or update issues to the bugdroid '
           'appengine app.')

  logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)
  parser.set_defaults(log_level=logging.DEBUG,
                      ts_mon_target_type='task',
                      ts_mon_task_service_name='bugdroid',
                      ts_mon_task_job_name='bugdroid_job')

  opts = parser.parse_args(args)
  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  # We need to include the logger ID (i.e. "%(name)s") in the formatter string.
  # Override the root logging handler set by infra_libs.logs.
  logging.root.handlers[0].setFormatter(logging.Formatter(
      '[%(severity)s%(iso8601)s %(process)d %(thread)d '
      '%(fullModuleName)s:%(lineno)s] (%(name)s) %(message)s'))

  return opts, loop_opts
def parse_args(args):  # pragma: no cover
  def check_url(s):
    parsed = urlparse.urlparse(s)
    if parsed.scheme not in ('https', 'git', 'file'):
      raise argparse.ArgumentTypeError(
          'Repo URL must use https, git or file protocol.')
    if not parsed.path.strip('/'):
      raise argparse.ArgumentTypeError('URL is missing a path?')
    return git2.Repo(s)

  parser = argparse.ArgumentParser('./run.py %s' % __package__)
  parser.add_argument('--dry_run', action='store_true',
                      help='Do not actually push anything.')
  parser.add_argument('--repo_dir', metavar='DIR', default='gsubtreed_repos',
                      help=('The directory to use for git clones '
                            '(default: %(default)s)'))
  parser.add_argument('--json_output', metavar='PATH',
                      help='Path to write JSON with results of the run to')
  parser.add_argument('repo', nargs=1, help='The url of the repo to act on.',
                      type=check_url)

  logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)
  parser.set_defaults(
      ts_mon_target_type='task',
      ts_mon_task_service_name='gsubtreed',
  )

  opts = parser.parse_args(args)

  repo = opts.repo[0]
  repo.dry_run = opts.dry_run
  repo.repos_dir = os.path.abspath(opts.repo_dir)

  if not opts.ts_mon_task_job_name:
    opts.ts_mon_task_job_name = urlparse.urlparse(repo.url).path

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return Options(repo, loop_opts, opts.json_output)
def parse_args(argv):
  parser = argparse.ArgumentParser(description=__doc__)
  parser.add_argument('master', nargs='+',
      help='Name of masters (*, master.*) to clean.')
  parser.add_argument('--max-twistd-log-age', metavar='AGE-TOKENS',
      default=None, type=argparse_timedelta_type,
      help='If set, "twistd.log" files older than this will be purged.')
  parser.add_argument('--production', action='store_true',
      help='If set, actually delete the files instead of listing them.')
  parser.add_argument('--gclient-root',
      help='The path to the directory containing the master checkout '
           '".gclient" file. If omitted, an attempt will be made to probe '
           'one.')

  logs.add_argparse_options(parser)
  opts = parser.parse_args(argv)
  logs.process_argparse_options(opts)

  return opts
def parse_args(args):
  parser = argparse.ArgumentParser('python -m %s' % __package__)
  parser.add_argument('--project', required=True)
  parser.add_argument('--range', required=True)
  logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)
  parser.set_defaults(
      logs_directory='',
      ts_mon_target_type='task',
      ts_mon_task_service_name='cq_stats_uploader',
  )
  opts = parser.parse_args(args)
  if not opts.ts_mon_task_job_name:
    opts.ts_mon_task_job_name = '%s-%s' % (opts.project, opts.range)
  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  return opts
def main(argv):
  p = argparse.ArgumentParser()
  p.add_argument(
      '--repo-dir', metavar='DIR', default='zip_release_commits_repos',
      help='The directory to use for git clones (default: %(default)s).')
  logs.add_argparse_options(p)
  opts = p.parse_args(argv)
  logs.process_argparse_options(opts)

  # Get all refs
  r = Repo('https://chromium.googlesource.com/chromium/src')
  r.repos_dir = os.path.abspath(opts.repo_dir)
  r.reify()
  r.fetch()

  all_releases = r['refs/heads/ignore/foo']

  tags = r.run(
      'for-each-ref', '--sort=committerdate',
      '--format=%(objectname) %(refname)', 'refs/tags'
  ).splitlines()

  already_have = set()
  try:
    already_have = r.run('rev-list', '--first-parent', '--parents',
                         all_releases.ref).splitlines()
    # Last commit in chain is the null commit
    already_have = set(l.split()[-1] for l in already_have[:-1])
  except CalledProcessError:
    pass

  for hsh_tag in tags:
    hsh, tag = hsh_tag.split()
    if hsh in already_have:
      print 'skipping', tag
      continue
    print 'processing', tag

    c = r.get_commit(hsh)

    if all_releases.commit is INVALID:
      cu = c.data.committer
      cu = cu.alter(timestamp=cu.timestamp.alter(secs=cu.timestamp.secs - 1))
      au = c.data.author
      au = au.alter(timestamp=au.timestamp.alter(secs=au.timestamp.secs - 1))
      all_releases.update_to(c.alter(
          author=au,
          committer=cu,
          parents=(),
          tree=None,
      ))

    parents = [all_releases.commit.hsh, c.hsh]
    all_releases.update_to(c.alter(
        author=c.data.committer,
        message_lines=[tag],
        parents=parents,
        tree=None,
    ))

  print all_releases.commit
  r.run('push', 'origin', '%s:%s' % (all_releases.commit.hsh, all_releases.ref))
def main(args):
  parser = argparse.ArgumentParser(prog='run.py %s' % __package__)
  parser.add_argument('data_url', action='store', nargs='*')  # Deprecated
  parser.add_argument('--use-cache', action='store_true')
  parser.add_argument('--master-filter', action='store')
  parser.add_argument('--builder-filter', action='store')
  parser.add_argument('--processes', default=PARALLEL_TASKS, action='store',
                      type=int)
  parser.add_argument('--jobs', default=CONCURRENT_TASKS, action='store',
                      type=int)
  logs.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)

  gatekeeper_json = os.path.join(build_scripts_dir, 'slave', 'gatekeeper.json')
  parser.add_argument('--gatekeeper', action='store', default=gatekeeper_json)
  gatekeeper_trees_json = os.path.join(build_scripts_dir, 'slave',
                                       'gatekeeper_trees.json')
  parser.add_argument('--gatekeeper-trees', action='store',
                      default=gatekeeper_trees_json)

  parser.add_argument('--findit-api-url',
                      help='Query findit results from this url.')
  parser.add_argument('--crbug-service-account',
                      help='Path to a service account JSON file to be used to '
                           'search for relevant issues on crbug.com.')
  parser.add_argument('--use-monorail', default=False, action='store_true',
                      help='When specified, Monorail API is used to search for '
                           'issues on crbug')
  parser.add_argument('--api-endpoint-prefix',
                      help='Endpoint prefix for posting alerts. Old API '
                           'endpoint will be formed by adding value specified '
                           'in --old-api-path to the prefix, new API endpoints '
                           'will be formed by adding '
                           '/api/v1/alerts/<tree_name>.')
  parser.add_argument('--old-api-path',
                      help='Path to be appended to --api-endpoint-prefix to '
                           'form old API endpoint.')

  args = parser.parse_args(args)
  logs.process_argparse_options(args)
  loop_args = outer_loop.process_argparse_options(args)

  # TODO(sergiyb): Remove support for data_url when builder_alerts recipes are
  # updated and using new syntax to call this script.
  if args.data_url:
    if (len(args.data_url) == 1 and args.data_url[0].endswith('alerts') and
        not args.api_endpoint_prefix and not args.old_api_path):
      logging.warn(
          'You are using positional argument to specify URL to post updates '
          'to. Please use --api-endpoint-prefix and --old-api-path instead.')
      slash_index = args.data_url[0].rindex('/')
      args.api_endpoint_prefix = args.data_url[0][:slash_index]
      args.old_api_path = args.data_url[0][slash_index + 1:]
    else:
      logging.error(
          'Unsupported positional argument(s) or used together with '
          '--api-endpoint-prefix/--old-api-path. Please use only '
          '--api-endpoint-prefix and --old-api-path to specify URL to post new '
          'alerts to.')
      return

  # Suppress all logging from connectionpool; it is too verbose at info level.
  if args.log_level != logging.DEBUG:
    class _ConnectionpoolFilter(object):
      @staticmethod
      def filter(record):
        if record.levelno == logging.INFO:
          return False
        return True

    logging.getLogger('requests.packages.urllib3.connectionpool').addFilter(
        _ConnectionpoolFilter())

  def outer_loop_iteration():
    return inner_loop(args)

  loop_results = outer_loop.loop(task=outer_loop_iteration,
                                 sleep_timeout=lambda: 5,
                                 **loop_args)

  return 0 if loop_results.success else 1
def parse_args():  # pragma: no cover
  parser = argparse.ArgumentParser(
      description='Manage the state of a buildbot master. NOTE: Does nothing '
                  'unless --prod is specified')
  parser.add_argument('directory', nargs='?',
      help='location of the master to manage')
  parser.add_argument(
      'desired_state', nargs='?',
      choices=buildbot_state.STATES['desired_buildbot_state'],
      help='the desired state of the master')
  parser.add_argument(
      'transition_time_utc', nargs='?', type=float,
      help='seconds since the UTC epoch to trigger the state')
  parser.add_argument('--list-all-states', action='store_true',
      help='list all states with their actions and exit')
  parser.add_argument(
      '--enable-gclient-sync', action='store_true',
      help='perform a gclient sync before every master start')
  parser.add_argument(
      '--emergency-file', default='.stop_master_lifecycle',
      help='filename of the emergency stop file. if this file is found in the '
           'master directory, exit immediately')
  parser.add_argument('--hostname', default=socket.getfqdn(),
      help='override local hostname (currently %(default)s)')
  parser.add_argument('--prod', action='store_true',
      help='actually run commands instead of printing them.')
  parser.add_argument(
      '--loop', action='store_true',
      help='repeatedly run the state machine. will not terminate unless killed')
  parser.add_argument(
      '--loop-sleep-secs', type=int, default=5,
      help='how many seconds to wait between loop runs. default %(default)s')
  parser.add_argument(
      '--connection-timeout', type=int, default=30,
      help='how many seconds to wait for a master http request before timing '
           'out.')
  outer_loop.add_argparse_options(parser)
  logs.add_argparse_options(parser)

  args = parser.parse_args()
  logs.process_argparse_options(args)

  if not args.list_all_states:
    if not args.directory:
      parser.error('A master directory must be specified.')
    if not args.transition_time_utc:
      parser.error('A transition time must be specified.')
    if not args.desired_state:
      parser.error('A desired state must be specified.')

  return args