Code Example #1
def parse_args(argv):
    p = argparse.ArgumentParser()

    p.add_argument('--interval',
                   default=10,
                   type=int,
                   help='time (in seconds) between sampling system metrics')
    p.add_argument(
        '--root-setup',
        action='store_true',
        help='if this is set sysmon will run once to initialise configs in /etc '
        'and then exit immediately.  Used on GCE bots to bootstrap sysmon')

    logs.add_argparse_options(p)
    ts_mon.add_argparse_options(p)
    outer_loop.add_argparse_options(p)

    p.set_defaults(ts_mon_flush='manual')
    opts = p.parse_args(argv)

    logs.process_argparse_options(opts)
    ts_mon.process_argparse_options(opts)
    loop_opts = outer_loop.process_argparse_options(opts)

    return opts, loop_opts
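
For reference, a minimal sketch of how a parse_args like this is typically driven from the module's entry point. The outer_loop.loop call mirrors the later examples on this page; the per-iteration function run_once and the explicit ts_mon.flush() call are illustrative assumptions rather than code from the original sysmon module.

def main(argv):  # hypothetical driver, not part of the original example
    opts, loop_opts = parse_args(argv)

    def single_iteration():
        run_once(opts)  # placeholder for the real per-iteration work
        # ts_mon_flush defaults to 'manual' above, so metrics are flushed
        # explicitly once per iteration (assuming ts_mon exposes flush()).
        ts_mon.flush()
        return True

    loop_results = outer_loop.loop(
        task=single_iteration,
        sleep_timeout=lambda: opts.interval,
        **loop_opts)
    return 0 if loop_results.success else 1
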
Code Example #2
File: __main__.py Project: nicko96/Chrome-Infra
def parse_args(argv):
  p = argparse.ArgumentParser()

  p.add_argument(
      '--interval',
      default=10, type=int,
      help='time (in seconds) between sampling system metrics')
  p.add_argument(
      '--root-setup',
      action='store_true',
      help='if this is set sysmon will run once to initialise configs in /etc '
           'and then exit immediately.  Used on GCE bots to bootstrap sysmon')

  logs.add_argparse_options(p)
  ts_mon.add_argparse_options(p)
  outer_loop.add_argparse_options(p)

  p.set_defaults(ts_mon_flush='manual')
  opts = p.parse_args(argv)

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return opts, loop_opts
Code Example #3
File: __main__.py Project: mcgreevy/chromium-infra
def parse_args(args):  # pragma: no cover
    parser = argparse.ArgumentParser('./run.py %s' % __package__)
    parser.add_argument(
        '-c',
        '--configfile',
        help='Local JSON poller configuration file to override '
        'config file from luci-config.')
    parser.add_argument(
        '-d',
        '--credentials_db',
        help='File to use for Codesite OAuth2 credentials storage.')
    parser.add_argument(
        '--datadir',
        default=DATADIR,
        help='Directory where persistent app data should be stored.')

    logs.add_argparse_options(parser)
    ts_mon.add_argparse_options(parser)
    outer_loop.add_argparse_options(parser)

    parser.set_defaults(ts_mon_target_type='task',
                        ts_mon_task_service_name='bugdroid',
                        ts_mon_task_job_name='bugdroid_job')
    opts = parser.parse_args(args)

    logs.process_argparse_options(opts)
    ts_mon.process_argparse_options(opts)
    loop_opts = outer_loop.process_argparse_options(opts)

    return opts, loop_opts
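
The set_defaults call above pre-populates options that ts_mon.add_argparse_options registers on the parser; a default installed this way is still overridden by an explicit command-line flag. A self-contained sketch of that argparse behaviour (the flag name is illustrative):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--ts-mon-task-job-name')
parser.set_defaults(ts_mon_task_job_name='bugdroid_job')

# No flag given: the set_defaults value is used.
print(parser.parse_args([]).ts_mon_task_job_name)  # bugdroid_job
# Flag given: the command line wins over the default.
print(parser.parse_args(
    ['--ts-mon-task-job-name', 'other']).ts_mon_task_job_name)  # other
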
Code Example #4
def main(args):
    parser = argparse.ArgumentParser(prog='run.py %s' % __package__)
    parser.add_argument('data_url', action='store', nargs='*')
    parser.add_argument('--use-cache', action='store_true')
    parser.add_argument('--master-filter', action='store')
    parser.add_argument('--builder-filter', action='store')
    parser.add_argument('--processes',
                        default=PARALLEL_TASKS,
                        action='store',
                        type=int)
    parser.add_argument('--jobs',
                        default=CONCURRENT_TASKS,
                        action='store',
                        type=int)
    logs.add_argparse_options(parser)
    outer_loop.add_argparse_options(parser)

    gatekeeper_json = os.path.join(build_scripts_dir, 'slave',
                                   'gatekeeper.json')
    parser.add_argument('--gatekeeper',
                        action='store',
                        default=gatekeeper_json)
    gatekeeper_trees_json = os.path.join(build_scripts_dir, 'slave',
                                         'gatekeeper_trees.json')
    parser.add_argument('--gatekeeper-trees',
                        action='store',
                        default=gatekeeper_trees_json)

    parser.add_argument('--findit-api-url',
                        help='Query findit results from this url.')

    args = parser.parse_args(args)
    logs.process_argparse_options(args)
    loop_args = outer_loop.process_argparse_options(args)

    # Suppress all logging from connectionpool; it is too verbose at info level.
    if args.log_level != logging.DEBUG:

        class _ConnectionpoolFilter(object):
            @staticmethod
            def filter(record):
                if record.levelno == logging.INFO:
                    return False
                return True

        logging.getLogger(
            'requests.packages.urllib3.connectionpool').addFilter(
                _ConnectionpoolFilter())

    def outer_loop_iteration():
        return inner_loop(args)

    loop_results = outer_loop.loop(task=outer_loop_iteration,
                                   sleep_timeout=lambda: 5,
                                   **loop_args)

    return 0 if loop_results.success else 1
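
The _ConnectionpoolFilter above uses the standard logging filter protocol: any object with a filter(record) method can be attached to a logger, and returning False drops the record. A standalone sketch of the same technique (the logger name and messages are made up):

import logging

class _InfoDropper(object):
    @staticmethod
    def filter(record):
        # Drop INFO records, let everything else through.
        return record.levelno != logging.INFO

logging.basicConfig(level=logging.DEBUG)
noisy = logging.getLogger('some.noisy.library')
noisy.addFilter(_InfoDropper())

noisy.info('suppressed by the filter')
noisy.warning('still emitted')
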
Code Example #5
File: __main__.py Project: nicko96/Chrome-Infra
def parse_args(argv):
  p = argparse.ArgumentParser()

  group = p.add_mutually_exclusive_group(required=True)
  group.add_argument(
      '--url',
      help='URL of one buildbot master to monitor')
  group.add_argument('--build-dir',
      help='location of the tools/build directory. Used with --hostname to get '
      'the list of all buildbot masters on this host to monitor. Cannot be '
      'used with --url')

  p.add_argument('--hostname',
      default=socket.getfqdn(),
      help='override local hostname (currently %(default)s). Used with '
      '--build-dir to get the list of all buildbot masters on this host to '
      'monitor')
  p.add_argument(
      '--interval',
      default=60, type=int,
      help='time (in seconds) between sampling the buildbot master')

  logs.add_argparse_options(p)
  ts_mon.add_argparse_options(p)
  outer_loop.add_argparse_options(p)

  DEFAULT_ARG_VALUE = '(default)'

  p.set_defaults(
      ts_mon_flush='manual',
      ts_mon_target_type='task',
      ts_mon_task_service_name='mastermon',
      ts_mon_task_job_name=DEFAULT_ARG_VALUE,
  )
  opts = p.parse_args(argv)

  if opts.ts_mon_task_job_name == DEFAULT_ARG_VALUE:
    # The ts_mon job name defaults to either the hostname when monitoring all
    # masters on a host, or the name of the master extracted from the URL.
    if opts.build_dir:
      opts.ts_mon_task_job_name = opts.hostname
    else:
      parsed_url = urlparse.urlsplit(opts.url)
      path_components = [x for x in parsed_url.path.split('/') if x]
      if path_components:
        opts.ts_mon_task_job_name = path_components[-1]
      else:
        opts.ts_mon_task_job_name = parsed_url.netloc

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return opts, loop_opts
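
To make the job-name fallback concrete, a small sketch of the URL handling above using only the standard library (the URLs are made up):

try:
  import urlparse  # Python 2, as used in the examples on this page
except ImportError:
  from urllib import parse as urlparse  # Python 3 equivalent

def job_name_from_url(url):
  parsed = urlparse.urlsplit(url)
  path_components = [x for x in parsed.path.split('/') if x]
  return path_components[-1] if path_components else parsed.netloc

# URL with a path: the last path component becomes the job name.
print(job_name_from_url('https://build.example.com/p/chromium.fyi'))  # chromium.fyi
# Bare host: fall back to the network location.
print(job_name_from_url('https://build.example.com'))  # build.example.com
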
Code Example #6
def parse_args(argv):
  p = argparse.ArgumentParser()

  group = p.add_mutually_exclusive_group(required=True)
  group.add_argument(
      '--url',
      help='URL of one buildbot master to monitor')
  group.add_argument('--build-dir',
      help='location of the tools/build directory. Used with --hostname to get '
      'the list of all buildbot masters on this host to monitor. Cannot be '
      'used with --url')

  p.add_argument('--hostname',
      default=socket.getfqdn(),
      help='override local hostname (currently %(default)s). Used with '
      '--build-dir to get the list of all buildbot masters on this host to '
      'monitor')
  p.add_argument(
      '--interval',
      default=300, type=int,
      help='time (in seconds) between sampling the buildbot master')

  logs.add_argparse_options(p)
  ts_mon.add_argparse_options(p)
  outer_loop.add_argparse_options(p)

  DEFAULT_ARG_VALUE = '(default)'

  p.set_defaults(
      ts_mon_flush='manual',
      ts_mon_target_type='task',
      ts_mon_task_service_name='mastermon',
      ts_mon_task_job_name=DEFAULT_ARG_VALUE,
  )
  opts = p.parse_args(argv)

  if opts.ts_mon_task_job_name == DEFAULT_ARG_VALUE:
    # The ts_mon job name defaults to either the hostname when monitoring all
    # masters on a host, or the name of the master extracted from the URL.
    if opts.build_dir:
      opts.ts_mon_task_job_name = opts.hostname
    else:
      parsed_url = urlparse.urlsplit(opts.url)
      path_components = [x for x in parsed_url.path.split('/') if x]
      if path_components:
        opts.ts_mon_task_job_name = path_components[-1]
      else:
        opts.ts_mon_task_job_name = parsed_url.netloc

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return opts, loop_opts
Code Example #7
File: __main__.py Project: eunchong/infra
def parse_args():  # pragma: no cover
  parser = argparse.ArgumentParser(
      description='Manage the state of a buildbot master. NOTE: Does nothing '
                  'unless --prod is specified')
  parser.add_argument('directory', nargs='?',
      help='location of the master to manage')
  parser.add_argument('desired_state', nargs='?',
      choices=buildbot_state.STATES['desired_buildbot_state'],
      help='the desired state of the master')
  parser.add_argument('transition_time_utc', nargs='?', type=float,
      help='seconds since the UTC epoch to trigger the state')
  parser.add_argument('--list-all-states', action='store_true',
      help='list all states with their actions and exit')
  parser.add_argument('--builder-filter', action='append', default=[],
      help='appends a Python regular expression to the list of builder '
           'filters. By default, all builders count as building; if builder '
           'filters are supplied, only builders that match at least one filter '
           'will be counted.')
  parser.add_argument('--drain-timeout', metavar='SECONDS', type=int,
      default=buildbot_state.DEFAULT_DRAIN_TIMEOUT_SEC,
      help='sets the drain state timeout, in seconds.')
  parser.add_argument('--enable-gclient-sync', action='store_true',
      help='perform a gclient sync before every master start')
  parser.add_argument('--emergency-file',
      default='.stop_master_lifecycle',
      help='filename of the emergency stop file. if this file is found in the '
           'master directory, exit immediately')
  parser.add_argument('--hostname',
      default=socket.getfqdn(),
      help='override local hostname (currently %(default)s)')
  parser.add_argument('--prod', action='store_true',
      help='actually run commands instead of printing them.')
  parser.add_argument('--loop', action='store_true',
      help='repeatedly run the state machine. will not terminate unless killed')
  parser.add_argument('--loop-sleep-secs', type=int, default=5,
      help='how many seconds to wait between loop runs. default %(default)s')
  parser.add_argument('--connection-timeout', type=int, default=30,
      help='how many seconds to wait for a master http request before timing '
           'out.')
  outer_loop.add_argparse_options(parser)
  logs.add_argparse_options(parser)

  args = parser.parse_args()
  logs.process_argparse_options(args)

  if not args.list_all_states:
    if not args.directory:
      parser.error('A master directory must be specified.')
    if not args.transition_time_utc:
      parser.error('A transition time must be specified.')
    if not args.desired_state:
      parser.error('A desired state must be specified.')
  return args
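
The positionals above are declared with nargs='?' so that --list-all-states can be used on its own; the explicit parser.error calls then enforce them only when a real transition is requested. A self-contained sketch of that pattern (the argument names are illustrative):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('directory', nargs='?')
parser.add_argument('--list-all-states', action='store_true')

args = parser.parse_args(['--list-all-states'])
if not args.list_all_states and not args.directory:
  # parser.error prints the usage message and exits with status 2.
  parser.error('A master directory must be specified.')
print('only listing states, no directory required')
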
Code Example #8
def parse_args(args):  # pragma: no cover
  parser = argparse.ArgumentParser('python -m %s' % __package__)
  parser.add_argument('--dry_run', action='store_true',
                      help='Do not actually push anything.')
  parser.add_argument('--repo_dir', metavar='DIR', default='tag_pusher_repos',
                      help=('The directory to use for git clones '
                            '(default: %(default)s)'))
  parser.add_argument('--spec_json', metavar='SPEC', required=True,
                      help=('JSON file with configuration: '
                            '{<repo_url>: [{"refs" : [<ref>], "url": <url>}]}'))
  parser.add_argument('--json_output', metavar='PATH',
                      help='Path to write JSON with results of the run to')
  logs.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)

  opts = parser.parse_args(args)

  logs.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  # Read and validate the spec JSON.
  with open(opts.spec_json, 'r') as f:
    spec = json.load(f)
  if not isinstance(spec, dict):
    parser.error('Expecting dict as a spec')
  for repo_url, push_list in spec.iteritems():
    # Repo URL.
    parsed = urlparse.urlparse(repo_url)
    if parsed.scheme not in ('https', 'git', 'file'):
      parser.error('Repo URL must use https, git or file protocol.')
    if not parsed.path.strip('/'):
      parser.error('Repo URL is missing a path?')
    # Ref and URL to fetch.
    for d in push_list:
      refs = d.get('refs') or []
      url_to_read = d.get('url')
      for ref in refs:
        if not ref or not ref.startswith('refs/'):
          parser.error('Ref to push should start with refs/')
      if not url_to_read or not url_to_read.startswith('https://'):
        parser.error('URL to read SHA1 from should use https')

  # git2.Repo -> [([ref_to_push], url_to_read)].
  spec_by_repo = {}
  for url, push_list in spec.iteritems():
    repo = git2.Repo(url)
    repo.dry_run = opts.dry_run
    repo.repos_dir = opts.repo_dir
    spec_by_repo[repo] = [(d['refs'], d['url']) for d in push_list]

  return Options(spec_by_repo, loop_opts, opts.json_output)
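
A minimal example of a spec file that passes the validation above, written as a Python literal and serialised with json (the repository and SHA1 URLs are placeholders):

import json

spec = {
    'https://example.googlesource.com/some/repo': [
        {
            'refs': ['refs/tags/latest-green'],
            'url': 'https://example.com/lkgr-sha1.txt',
        },
    ],
}

with open('spec.json', 'w') as f:
  json.dump(spec, f, indent=2)
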
Code Example #9
def parse_args(args):  # pragma: no cover
    def check_url(s):
        parsed = urlparse.urlparse(s)
        if parsed.scheme not in ('https', 'git', 'file'):
            raise argparse.ArgumentTypeError(
                'Repo URL must use https, git or file protocol.')
        if not parsed.path.strip('/'):
            raise argparse.ArgumentTypeError('URL is missing a path?')
        return git2.Repo(s)

    parser = argparse.ArgumentParser('./run.py %s' % __package__)
    parser.add_argument('--dry_run',
                        action='store_true',
                        help='Do not actually push anything.')
    parser.add_argument('--repo_dir',
                        metavar='DIR',
                        default='gsubtreed_repos',
                        help=('The directory to use for git clones '
                              '(default: %(default)s)'))
    parser.add_argument('--json_output',
                        metavar='PATH',
                        help='Path to write JSON with results of the run to')
    parser.add_argument('repo',
                        nargs=1,
                        help='The url of the repo to act on.',
                        type=check_url)
    logs.add_argparse_options(parser)
    ts_mon.add_argparse_options(parser)
    outer_loop.add_argparse_options(parser)

    parser.set_defaults(
        ts_mon_target_type='task',
        ts_mon_task_service_name='gsubtreed',
    )

    opts = parser.parse_args(args)

    repo = opts.repo[0]
    repo.repos_dir = os.path.abspath(opts.repo_dir)

    if not opts.ts_mon_task_job_name:
        parsed_repo_url = urlparse.urlparse(repo.url)
        opts.ts_mon_task_job_name = '%s%s' % (parsed_repo_url.netloc,
                                              parsed_repo_url.path)

    logs.process_argparse_options(opts)
    ts_mon.process_argparse_options(opts)
    loop_opts = outer_loop.process_argparse_options(opts)

    return Options(repo, loop_opts, opts.json_output, opts.dry_run)
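
The check_url type hook raises argparse.ArgumentTypeError so that a bad URL produces a clean usage error rather than a traceback. A standalone sketch of just the validation part, leaving out the git2.Repo construction (the URLs are made up):

import argparse

try:
    import urlparse  # Python 2, as used in the examples on this page
except ImportError:
    from urllib import parse as urlparse  # Python 3 equivalent

def check_url(s):
    parsed = urlparse.urlparse(s)
    if parsed.scheme not in ('https', 'git', 'file'):
        raise argparse.ArgumentTypeError(
            'Repo URL must use https, git or file protocol.')
    if not parsed.path.strip('/'):
        raise argparse.ArgumentTypeError('URL is missing a path?')
    return s  # the real code wraps this in git2.Repo(s)

parser = argparse.ArgumentParser()
parser.add_argument('repo', type=check_url)

print(parser.parse_args(['https://example.com/some/repo']).repo)  # accepted
parser.parse_args(['ftp://example.com/some/repo'])  # exits with a usage error
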
Code Example #10
File: __main__.py Project: nicko96/Chrome-Infra
def main(args):
  parser = argparse.ArgumentParser(prog='run.py %s' % __package__)
  parser.add_argument('data_url', action='store', nargs='*')
  parser.add_argument('--use-cache', action='store_true')
  parser.add_argument('--master-filter', action='store')
  parser.add_argument('--builder-filter', action='store')
  parser.add_argument('--processes', default=PARALLEL_TASKS, action='store',
                      type=int)
  parser.add_argument('--jobs', default=CONCURRENT_TASKS, action='store',
                      type=int)
  logs.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)

  gatekeeper_json = os.path.join(build_scripts_dir, 'slave', 'gatekeeper.json')
  parser.add_argument('--gatekeeper', action='store', default=gatekeeper_json)
  gatekeeper_trees_json = os.path.join(build_scripts_dir, 'slave',
                                       'gatekeeper_trees.json')
  parser.add_argument('--gatekeeper-trees', action='store',
                      default=gatekeeper_trees_json)

  parser.add_argument('--findit-api-url',
                      help='Query findit results from this url.')

  args = parser.parse_args(args)
  logs.process_argparse_options(args)
  loop_args = outer_loop.process_argparse_options(args)

  # Suppress all logging from connectionpool; it is too verbose at info level.
  if args.log_level != logging.DEBUG:
    class _ConnectionpoolFilter(object):

      @staticmethod
      def filter(record):
        if record.levelno == logging.INFO:
          return False
        return True
    logging.getLogger('requests.packages.urllib3.connectionpool').addFilter(
        _ConnectionpoolFilter())

  def outer_loop_iteration():
    return inner_loop(args)

  loop_results = outer_loop.loop(
      task=outer_loop_iteration,
      sleep_timeout=lambda: 5,
      **loop_args)

  return 0 if loop_results.success else 1
Code Example #11
File: __main__.py Project: xinghun61/infra
def parse_args(args):  # pragma: no cover
    parser = argparse.ArgumentParser('./run.py %s' % __package__)
    parser.add_argument(
        '-c',
        '--configfile',
        help='Local JSON poller configuration file to override '
        'config file from luci-config.')
    parser.add_argument(
        '-d',
        '--credentials_db',
        help=
        'File to use for OAuth2 credentials storage if not running on LUCI.')
    parser.add_argument(
        '--datadir',
        default=DATADIR,
        help='Directory where persistent app data should be stored.')
    parser.add_argument(
        '--dryrun',
        action='store_true',
        help='Don\'t update monorail issues or update issues to the bugdroid '
        'appengine app.')

    logs.add_argparse_options(parser)
    ts_mon.add_argparse_options(parser)
    outer_loop.add_argparse_options(parser)

    parser.set_defaults(log_level=logging.DEBUG,
                        ts_mon_target_type='task',
                        ts_mon_task_service_name='bugdroid',
                        ts_mon_task_job_name='bugdroid_job')
    opts = parser.parse_args(args)

    logs.process_argparse_options(opts)
    ts_mon.process_argparse_options(opts)
    loop_opts = outer_loop.process_argparse_options(opts)

    # We need to include the logger ID (i.e. "%(name)s") in the formatter string.
    # Override the root logging handler set by infra_libs.logs.
    logging.root.handlers[0].setFormatter(
        logging.Formatter(
            '[%(severity)s%(iso8601)s %(process)d %(thread)d '
            '%(fullModuleName)s:%(lineno)s] (%(name)s) %(message)s'))

    return opts, loop_opts
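
The formatter above references non-standard record fields such as %(severity)s, %(iso8601)s and %(fullModuleName)s, which the handler installed by infra_libs.logs presumably injects. As an assumption-labelled sketch, the standard way to add such fields is a logging filter that sets extra attributes on every record (the field values below are simplified stand-ins, not infra_libs' exact behaviour):

import datetime
import logging

class _ExtraFieldsFilter(object):
    """Adds the custom fields the formatter string expects."""

    @staticmethod
    def filter(record):
        record.severity = record.levelname[0]
        record.iso8601 = datetime.datetime.utcnow().isoformat()
        record.fullModuleName = record.name
        return True

handler = logging.StreamHandler()
handler.addFilter(_ExtraFieldsFilter())
handler.setFormatter(logging.Formatter(
    '[%(severity)s%(iso8601)s %(process)d %(thread)d '
    '%(fullModuleName)s:%(lineno)s] (%(name)s) %(message)s'))
logging.root.addHandler(handler)
logging.root.setLevel(logging.DEBUG)

logging.getLogger('demo').info('formatted with the custom fields')
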
Code Example #12
File: __main__.py Project: nicko96/Chrome-Infra
def parse_args(args):  # pragma: no cover
  def check_url(s):
    parsed = urlparse.urlparse(s)
    if parsed.scheme not in ('https', 'git', 'file'):
      raise argparse.ArgumentTypeError(
          'Repo URL must use https, git or file protocol.')
    if not parsed.path.strip('/'):
      raise argparse.ArgumentTypeError('URL is missing a path?')
    return git2.Repo(s)

  parser = argparse.ArgumentParser('./run.py %s' % __package__)
  parser.add_argument('--dry_run', action='store_true',
                      help='Do not actually push anything.')
  parser.add_argument('--repo_dir', metavar='DIR', default='gsubtreed_repos',
                      help=('The directory to use for git clones '
                            '(default: %(default)s)'))
  parser.add_argument('--json_output', metavar='PATH',
                      help='Path to write JSON with results of the run to')
  parser.add_argument('repo', nargs=1, help='The url of the repo to act on.',
                      type=check_url)
  logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)

  parser.set_defaults(
      ts_mon_target_type='task',
      ts_mon_task_service_name='gsubtreed',
  )

  opts = parser.parse_args(args)

  repo = opts.repo[0]
  repo.dry_run = opts.dry_run
  repo.repos_dir = os.path.abspath(opts.repo_dir)

  if not opts.ts_mon_task_job_name:
    opts.ts_mon_task_job_name = urlparse.urlparse(repo.url).path

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return Options(repo, loop_opts, opts.json_output)
Code Example #13
File: __main__.py Project: eunchong/infra
def main(args):
  parser = argparse.ArgumentParser(prog='run.py %s' % __package__)
  parser.add_argument('data_url', action='store', nargs='*')  # Deprecated
  parser.add_argument('--use-cache', action='store_true')
  parser.add_argument('--master-filter', action='store')
  parser.add_argument('--builder-filter', action='store')
  parser.add_argument('--processes', default=PARALLEL_TASKS, action='store',
                      type=int)
  parser.add_argument('--jobs', default=CONCURRENT_TASKS, action='store',
                      type=int)
  logs.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)

  gatekeeper_json = os.path.join(build_scripts_dir, 'slave', 'gatekeeper.json')
  parser.add_argument('--gatekeeper', action='store', default=gatekeeper_json)
  gatekeeper_trees_json = os.path.join(build_scripts_dir, 'slave',
                                       'gatekeeper_trees.json')
  parser.add_argument('--gatekeeper-trees', action='store',
                      default=gatekeeper_trees_json)

  parser.add_argument('--findit-api-url',
                      help='Query findit results from this url.')
  parser.add_argument('--crbug-service-account',
                      help='Path to a service account JSON file to be used to '
                           'search for relevant issues on crbug.com.')
  parser.add_argument('--use-monorail', default=False, action='store_true',
                      help='When specified, Monorail API is used to search for '
                           'issues on crbug')
  parser.add_argument('--api-endpoint-prefix',
                      help='Endpoint prefix for posting alerts. Old API '
                           'endpoint will be formed by adding value specified '
                           'in --old-api-path to the prefix, new API endpoints '
                           'will be formed by adding '
                           '/api/v1/alerts/<tree_name>.')
  parser.add_argument('--old-api-path',
                      help='Path to be appended to --api-endpoint-prefix to '
                           'form old API endpoint.')

  args = parser.parse_args(args)
  logs.process_argparse_options(args)
  loop_args = outer_loop.process_argparse_options(args)

  # TODO(sergiyb): Remove support for data_url when builder_alerts recipes are
  # updated and using new syntax to call this script.
  if args.data_url:
    if (len(args.data_url) == 1 and args.data_url[0].endswith('alerts') and
        not args.api_endpoint_prefix and not args.old_api_path):
      logging.warn(
          'You are using positional argument to specify URL to post updates '
          'to. Please use --api-endpoint-prefix and --old-api-path instead.')
      slash_index = args.data_url[0].rindex('/')
      args.api_endpoint_prefix = args.data_url[0][:slash_index]
      args.old_api_path = args.data_url[0][slash_index+1:]
    else:
      logging.error(
          'Unsupported positional argument(s) or used together with '
          '--api-endpoint-prefix/--old-api-path. Please use only '
          '--api-endpoint-prefix and --old-api-path to specify URL to post new '
          'alerts to.')
      return

  # Suppress all logging from connectionpool; it is too verbose at info level.
  if args.log_level != logging.DEBUG:
    class _ConnectionpoolFilter(object):

      @staticmethod
      def filter(record):
        if record.levelno == logging.INFO:
          return False
        return True
    logging.getLogger('requests.packages.urllib3.connectionpool').addFilter(
        _ConnectionpoolFilter())

  def outer_loop_iteration():
    return inner_loop(args)

  loop_results = outer_loop.loop(
      task=outer_loop_iteration,
      sleep_timeout=lambda: 5,
      **loop_args)

  return 0 if loop_results.success else 1
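
To make the backwards-compatibility branch concrete, a small worked example of the rindex('/') split (the URL is made up):

data_url = 'https://sheriff-o-matic.example.com/alerts'
slash_index = data_url.rindex('/')
api_endpoint_prefix = data_url[:slash_index]  # 'https://sheriff-o-matic.example.com'
old_api_path = data_url[slash_index + 1:]     # 'alerts'
print(api_endpoint_prefix, old_api_path)
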
Code Example #14
def parse_args():  # pragma: no cover
    parser = argparse.ArgumentParser(
        description='Manage the state of a buildbot master. NOTE: Does nothing '
        'unless --prod is specified')
    parser.add_argument('directory',
                        nargs='?',
                        help='location of the master to manage')
    parser.add_argument(
        'desired_state',
        nargs='?',
        choices=buildbot_state.STATES['desired_buildbot_state'],
        help='the desired state of the master')
    parser.add_argument(
        'transition_time_utc',
        nargs='?',
        type=float,
        help='seconds since the UTC epoch to trigger the state')
    parser.add_argument('--list-all-states',
                        action='store_true',
                        help='list all states with their actions and exit')
    parser.add_argument(
        '--enable-gclient-sync',
        action='store_true',
        help='perform a gclient sync before every master start')
    parser.add_argument(
        '--emergency-file',
        default='.stop_master_lifecycle',
        help='filename of the emergency stop file. if this file is found in the '
        'master directory, exit immediately')
    parser.add_argument('--hostname',
                        default=socket.getfqdn(),
                        help='override local hostname (currently %(default)s)')
    parser.add_argument('--prod',
                        action='store_true',
                        help='actually run commands instead of printing them.')
    parser.add_argument(
        '--loop',
        action='store_true',
        help=
        'repeatedly run the state machine. will not terminate unless killed')
    parser.add_argument(
        '--loop-sleep-secs',
        type=int,
        default=5,
        help='how many seconds to wait between loop runs. default %(default)s')
    parser.add_argument(
        '--connection-timeout',
        type=int,
        default=30,
        help='how many seconds to wait for a master http request before timing '
        'out.')
    outer_loop.add_argparse_options(parser)
    logs.add_argparse_options(parser)

    args = parser.parse_args()
    logs.process_argparse_options(args)

    if not args.list_all_states:
        if not args.directory:
            parser.error('A master directory must be specified.')
        if not args.transition_time_utc:
            parser.error('A transition time must be specified.')
        if not args.desired_state:
            parser.error('A desired state must be specified.')
    return args