Esempio n. 1
0
def parse_args(argv):
    """Build the sysmon argument parser and process the parsed options.

    Args:
      argv: list of command-line argument strings (without the program name).

    Returns:
      A (opts, loop_opts) tuple: the parsed argparse namespace and the
      options consumed by outer_loop.
    """
    parser = argparse.ArgumentParser()

    parser.add_argument(
        '--interval', type=int, default=10,
        help='time (in seconds) between sampling system metrics')
    parser.add_argument(
        '--root-setup', action='store_true',
        help=('if this is set sysmon will run once to initialise configs in '
              '/etc and then exit immediately.  Used on GCE bots to bootstrap '
              'sysmon'))

    # Register the shared logging / ts_mon / outer-loop option groups.
    for module in (logs, ts_mon, outer_loop):
        module.add_argparse_options(parser)

    # Metrics are flushed explicitly by the main loop.
    parser.set_defaults(ts_mon_flush='manual')
    options = parser.parse_args(argv)

    logs.process_argparse_options(options)
    ts_mon.process_argparse_options(options)
    loop_options = outer_loop.process_argparse_options(options)

    return options, loop_options
Esempio n. 2
0
def parse_args(args):  # pragma: no cover
    """Parse command-line arguments for the bugdroid poller.

    Args:
      args: list of command-line argument strings.

    Returns:
      A (opts, loop_opts) tuple: the parsed argparse namespace and the
      options consumed by outer_loop.
    """
    parser = argparse.ArgumentParser('./run.py %s' % __package__)
    parser.add_argument(
        '-c',
        '--configfile',
        # Fix: help text previously read "confilg file".
        help='Local JSON poller configuration file to override '
        'config file from luci-config.')
    parser.add_argument(
        '-d',
        '--credentials_db',
        help='File to use for Codesite OAuth2 credentials storage.')
    parser.add_argument(
        '--datadir',
        default=DATADIR,
        help='Directory where persistent app data should be stored.')

    # Shared infra option groups: logging, timeseries monitoring, outer loop.
    logs.add_argparse_options(parser)
    ts_mon.add_argparse_options(parser)
    outer_loop.add_argparse_options(parser)

    # Fixed ts_mon task identity for this service.
    parser.set_defaults(ts_mon_target_type='task',
                        ts_mon_task_service_name='bugdroid',
                        ts_mon_task_job_name='bugdroid_job')
    opts = parser.parse_args(args)

    logs.process_argparse_options(opts)
    ts_mon.process_argparse_options(opts)
    loop_opts = outer_loop.process_argparse_options(opts)

    return opts, loop_opts
Esempio n. 3
0
def parse_args(argv):
  """Parse sysmon command-line options.

  Args:
    argv: command-line argument strings, excluding the program name.

  Returns:
    Tuple (opts, loop_opts): the argparse namespace and the options
    consumed by outer_loop.
  """
  parser = argparse.ArgumentParser()

  parser.add_argument(
      '--interval', type=int, default=10,
      help='time (in seconds) between sampling system metrics')
  parser.add_argument(
      '--root-setup', action='store_true',
      help=('if this is set sysmon will run once to initialise configs in '
            '/etc and then exit immediately.  Used on GCE bots to bootstrap '
            'sysmon'))

  # Register the shared logging / ts_mon / outer-loop option groups.
  for mod in (logs, ts_mon, outer_loop):
    mod.add_argparse_options(parser)

  # The main loop flushes metrics itself.
  parser.set_defaults(ts_mon_flush='manual')
  options = parser.parse_args(argv)

  logs.process_argparse_options(options)
  ts_mon.process_argparse_options(options)
  loop_options = outer_loop.process_argparse_options(options)

  return options, loop_options
Esempio n. 4
0
def parse_args(argv):
  """Parse command-line options for service_manager.

  Args:
    argv: command-line argument strings, excluding the program name.

  Returns:
    The parsed argparse namespace.
  """
  # Platform-dependent default locations for runtime state, configuration,
  # and the deployed infra-python checkout.
  if sys.platform == 'win32':
    default_state_directory = 'C:\\chrome-infra\\service-state'
    default_config_directory = 'C:\\chrome-infra\\service-config'
    default_root_directory = 'C:\\infra-python'
  else:
    default_state_directory = '/var/run/infra-services'
    default_config_directory = '/etc/infra-services'
    default_root_directory = '/opt/infra-python'

  p = argparse.ArgumentParser(
      description='Starts and stops machine-wide infra services with arguments '
                  'from config files')

  p.add_argument(
      '--state-directory',
      default=default_state_directory,
      help='directory to store PID files (default %(default)s)')
  p.add_argument(
      '--config-directory',
      default=default_config_directory,
      help='directory to read JSON config files (default %(default)s)')
  p.add_argument(
      '--root-directory',
      default=default_root_directory,
      help='directory where the service_manager package is deployed. If this '
           'package is updated the process will exit')

  # Fix: both poll intervals previously lacked type=int, so a value given
  # on the command line stayed a string while the default was an int.
  p.add_argument(
      '--config-poll-interval',
      default=10,
      type=int,
      help='how frequently (in seconds) to poll the config directory')
  p.add_argument(
      '--service-poll-interval',
      default=10,
      type=int,
      help='how frequently (in seconds) to restart failed services')

  p.add_argument(
      '--root-setup',
      action='store_true',
      help='if this is set service_manager will run once to initialise configs '
           'in /etc and then exit immediately.  Used on GCE bots to bootstrap '
           'service_manager')

  logs.add_argparse_options(p)
  ts_mon.add_argparse_options(p)

  # Report metrics as a per-host task.
  p.set_defaults(
      ts_mon_target_type='task',
      ts_mon_task_service_name='service_manager',
      ts_mon_task_job_name=socket.getfqdn(),
  )

  opts = p.parse_args(argv)

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)

  return opts
Esempio n. 5
0
def parse_args(argv):
  """Parse mastermon command-line options.

  Returns:
    Tuple (opts, loop_opts): the parsed namespace and the options consumed
    by outer_loop.
  """
  parser = argparse.ArgumentParser()

  source = parser.add_mutually_exclusive_group(required=True)
  source.add_argument(
      '--url',
      help='URL of one buildbot master to monitor')
  source.add_argument(
      '--build-dir',
      help='location of the tools/build directory. Used with --hostname to get '
      'the list of all buildbot masters on this host to monitor. Cannot be '
      'used with --url')

  parser.add_argument(
      '--hostname',
      default=socket.getfqdn(),
      help='override local hostname (currently %(default)s). Used with '
      '--build-dir to get the list of all buildbot masters on this host to '
      'monitor')
  parser.add_argument(
      '--interval', type=int, default=300,
      help='time (in seconds) between sampling the buildbot master')

  logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)
  outer_loop.add_argparse_options(parser)

  # Sentinel so we can tell whether the user supplied a job name explicitly.
  DEFAULT_ARG_VALUE = '(default)'

  parser.set_defaults(
      ts_mon_flush='manual',
      ts_mon_target_type='task',
      ts_mon_task_service_name='mastermon',
      ts_mon_task_job_name=DEFAULT_ARG_VALUE,
  )
  opts = parser.parse_args(argv)

  if opts.ts_mon_task_job_name == DEFAULT_ARG_VALUE:
    # The ts_mon job name defaults to either the hostname when monitoring all
    # masters on a host, or the name of the master extracted from the URL.
    if opts.build_dir:
      opts.ts_mon_task_job_name = opts.hostname
    else:
      split_url = urlparse.urlsplit(opts.url)
      segments = [part for part in split_url.path.split('/') if part]
      # Use the last non-empty path segment, or the host if the path is empty.
      opts.ts_mon_task_job_name = (
          segments[-1] if segments else split_url.netloc)

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return opts, loop_opts
Esempio n. 6
0
  def add_argparse_options(self, parser):
    """Register any arguments used by this application.

    Override this method and call parser.add_argument().

    The base implementation registers the shared logging and timeseries
    monitoring (ts_mon) option groups; overrides that still want those
    options should also invoke the base implementation.

    Args:
      parser: An argparse.ArgumentParser object.
    """

    # Shared infra option groups; subclasses layer their own on top.
    logs.add_argparse_options(parser)
    ts_mon.add_argparse_options(parser)
Esempio n. 7
0
def parse_args(argv):
  """Parse mastermon options; returns a (opts, loop_opts) tuple."""
  p = argparse.ArgumentParser()

  exclusive = p.add_mutually_exclusive_group(required=True)
  exclusive.add_argument(
      '--url',
      help='URL of one buildbot master to monitor')
  exclusive.add_argument(
      '--build-dir',
      help='location of the tools/build directory. Used with --hostname to get '
      'the list of all buildbot masters on this host to monitor. Cannot be '
      'used with --url')

  p.add_argument(
      '--hostname',
      default=socket.getfqdn(),
      help='override local hostname (currently %(default)s). Used with '
      '--build-dir to get the list of all buildbot masters on this host to '
      'monitor')
  p.add_argument(
      '--interval', default=60, type=int,
      help='time (in seconds) between sampling the buildbot master')

  # Shared logging / monitoring / loop option groups.
  for module in (logs, ts_mon, outer_loop):
    module.add_argparse_options(p)

  # Placeholder letting us detect "job name not given on the command line".
  DEFAULT_ARG_VALUE = '(default)'

  p.set_defaults(
      ts_mon_flush='manual',
      ts_mon_target_type='task',
      ts_mon_task_service_name='mastermon',
      ts_mon_task_job_name=DEFAULT_ARG_VALUE,
  )
  opts = p.parse_args(argv)

  if opts.ts_mon_task_job_name == DEFAULT_ARG_VALUE:
    # Derive a job name: the hostname when watching every master on the
    # machine, otherwise the master name taken from the URL path.
    if opts.build_dir:
      opts.ts_mon_task_job_name = opts.hostname
    else:
      pieces = urlparse.urlsplit(opts.url)
      names = [seg for seg in pieces.path.split('/') if seg]
      if names:
        opts.ts_mon_task_job_name = names[-1]
      else:
        opts.ts_mon_task_job_name = pieces.netloc

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return opts, loop_opts
Esempio n. 8
0
def parse_args():
  """Parse command-line options for the master_manager launcher.

  Returns:
    The parsed argparse namespace.  Exits via parser.error() on invalid
    option combinations.
  """
  parser = argparse.ArgumentParser(
      description='Launches master_manager for every master on a host. NOTE: '
                  'does not perform any action unless --prod is set.')

  parser.add_argument('build_dir', nargs='?',
      help='location of the tools/build directory')
  parser.add_argument('--hostname',
      default=socket.getfqdn(),
      help='override local hostname (currently %(default)s)')
  parser.add_argument('--json-file',
      help='load desired master state from a file on disk')
  parser.add_argument('--json-gitiles',
      help='load desired master state from a gitiles location')
  parser.add_argument('--netrc',
      help='location of the netrc file when connecting to gitiles')
  parser.add_argument('--command-timeout',
      help='apply a timeout in seconds to each master_manager process')
  parser.add_argument('--verify', action='store_true',
      help='verify the desired master state JSON is valid, then exit')
  parser.add_argument('--prod', action='store_true',
      help='actually perform actions instead of doing a dry run')
  parser.add_argument('--processes',
      default=16, type=int,
      help='maximum number of master_manager processes to run simultaneously '
           '(default %(default)d)')

  ts_mon.add_argparse_options(parser)
  logs.add_argparse_options(parser)

  parser.set_defaults(
    ts_mon_target_type='task',
    ts_mon_task_job_name=socket.getfqdn().split(".")[0],
    ts_mon_task_service_name='master_manager_launcher',
    # Fix: the ts_mon flush option is stored as 'ts_mon_flush' (see the
    # other parsers in this file); the old 'ts_mon_flush_mode' default only
    # set an unused attribute, so manual flushing was never configured.
    # The old attribute is kept in case anything still reads it.
    ts_mon_flush='manual',
    ts_mon_flush_mode='manual',
  )

  args = parser.parse_args()
  ts_mon.process_argparse_options(args)
  logs.process_argparse_options(args)

  # Exactly one of --json-file / --json-gitiles must be supplied.
  if args.json_file and args.json_gitiles:
    parser.error("Can't specify --json-file and --json-gitiles simultaneously")

  if not args.json_gitiles and not args.json_file:
    parser.error('Must specify either --json-gitiles or --json-file.')

  # build_dir is only optional in --verify mode.
  if not args.verify:
    if not args.build_dir:
      parser.error('A build/ directory must be specified.')

  return args
Esempio n. 9
0
    def add_argparse_options(self, parser):
        """Register any arguments used by this application.

        Override this method and call parser.add_argument().

        The base implementation registers the standard logging and ts_mon
        option groups, gated on the USES_STANDARD_LOGGING / USES_TS_MON
        attributes (presumably class-level booleans defined outside this
        view -- confirm on the enclosing class).

        Args:
          parser: An argparse.ArgumentParser object.
        """

        if self.USES_STANDARD_LOGGING:
            logs.add_argparse_options(parser)
        if self.USES_TS_MON:
            ts_mon.add_argparse_options(parser)
Esempio n. 10
0
def parse_args(argv):
  """Parse command-line options for service_manager.

  Args:
    argv: command-line argument strings, excluding the program name.

  Returns:
    The parsed argparse namespace.
  """
  p = argparse.ArgumentParser(
      description='Starts and stops machine-wide infra services with arguments '
                  'from config files')

  p.add_argument(
      '--state-directory',
      default='/var/run/infra-services',
      help='directory to store PID files (default %(default)s)')
  p.add_argument(
      '--config-directory',
      default='/etc/infra-services',
      help='directory to read JSON config files (default %(default)s)')
  p.add_argument(
      '--root-directory',
      default='/opt/infra-python',
      help='directory where the service_manager package is deployed. If this '
           'package is updated the process will exit')

  # Fix: both poll intervals previously lacked type=int, so a value given
  # on the command line stayed a string while the default was an int.
  p.add_argument(
      '--config-poll-interval',
      default=10,
      type=int,
      help='how frequently (in seconds) to poll the config directory')
  p.add_argument(
      '--service-poll-interval',
      default=10,
      type=int,
      help='how frequently (in seconds) to restart failed services')

  p.add_argument(
      '--root-setup',
      action='store_true',
      help='if this is set service_manager will run once to initialise configs '
           'in /etc and then exit immediately.  Used on GCE bots to bootstrap '
           'service_manager')

  logs.add_argparse_options(p)
  ts_mon.add_argparse_options(p)

  # Report metrics as a per-host task.
  p.set_defaults(
      ts_mon_target_type='task',
      ts_mon_task_service_name='service_manager',
      ts_mon_task_job_name=socket.getfqdn(),
  )

  opts = p.parse_args(argv)

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)

  return opts
Esempio n. 11
0
def parse_args(args):  # pragma: no cover
    """Parse gsubtreed command-line options.

    Returns:
      Options(repo, loop_opts, json_output, dry_run).
    """
    def check_url(s):
        # Validate the repo URL before constructing the Repo object.
        pieces = urlparse.urlparse(s)
        if pieces.scheme not in ('https', 'git', 'file'):
            raise argparse.ArgumentTypeError(
                'Repo URL must use https, git or file protocol.')
        if not pieces.path.strip('/'):
            raise argparse.ArgumentTypeError('URL is missing a path?')
        return git2.Repo(s)

    parser = argparse.ArgumentParser('./run.py %s' % __package__)
    parser.add_argument('--dry_run', action='store_true',
                        help='Do not actually push anything.')
    parser.add_argument('--repo_dir', metavar='DIR',
                        default='gsubtreed_repos',
                        help=('The directory to use for git clones '
                              '(default: %(default)s)'))
    parser.add_argument('--json_output', metavar='PATH',
                        help='Path to write JSON with results of the run to')
    parser.add_argument('repo', nargs=1, type=check_url,
                        help='The url of the repo to act on.')

    # Shared logging / monitoring / loop option groups.
    for module in (logs, ts_mon, outer_loop):
        module.add_argparse_options(parser)

    parser.set_defaults(
        ts_mon_target_type='task',
        ts_mon_task_service_name='gsubtreed',
    )

    opts = parser.parse_args(args)

    repo = opts.repo[0]
    repo.repos_dir = os.path.abspath(opts.repo_dir)

    if not opts.ts_mon_task_job_name:
        # Default the job name to "<host><path>" of the repo URL.
        url_parts = urlparse.urlparse(repo.url)
        opts.ts_mon_task_job_name = '%s%s' % (url_parts.netloc,
                                              url_parts.path)

    logs.process_argparse_options(opts)
    ts_mon.process_argparse_options(opts)
    loop_opts = outer_loop.process_argparse_options(opts)

    return Options(repo, loop_opts, opts.json_output, opts.dry_run)
Esempio n. 12
0
def main(args):
    """Collect and flush buildbot master log state for each master path."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-n', '--nice', type=int, metavar='VALUE',
        help='Set the nice level of the process to VALUE prior to execution.')
    parser.add_argument(
        'master_paths', nargs='+',
        help='The paths to the master base directories to monitor. Consider '
        'the /path/to/build/masters/master.* wildcard to specify all of '
        'them.')

    logs.add_argparse_options(parser)
    ts_mon.add_argparse_options(parser)

    opts = parser.parse_args(args)
    logs.process_argparse_options(opts)
    ts_mon.process_argparse_options(opts)

    # Best-effort renice: a failure is logged but never fatal.
    if opts.nice:
        logging.debug('Setting process "nice" to: %d', opts.nice)
        try:
            os.nice(opts.nice)
        except OSError as e:
            logging.error('Failed to update "nice" to %d: %s', opts.nice, e)

    logging.info('Pulling master state from: %s', opts.master_paths)
    for path in opts.master_paths:
        name = master_path_to_name(path)

        # Metrics are reported against the target buildbot/master/<name>.
        target = ts_mon.TaskTarget('buildbot/master', name,
                                   opts.ts_mon_task_region,
                                   opts.ts_mon_task_hostname,
                                   opts.ts_mon_task_number)
        logging.info('Collecting log state for master "%s" at: %s',
                     name, path)
        get_master_state(path, target)

    logging.info('Flushing collected information.')
    ts_mon.flush()
    return 0
Esempio n. 13
0
def main(args):
  """Collect log state for every given master path, then flush metrics."""
  parser = argparse.ArgumentParser()
  parser.add_argument('-n', '--nice', type=int, metavar='VALUE',
      help='Set the nice level of the process to VALUE prior to execution.')
  parser.add_argument('master_paths', nargs='+',
      help='The paths to the master base directories to monitor. Consider '
           'the /path/to/build/masters/master.* wildcard to specify all of '
           'them.')

  logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)

  options = parser.parse_args(args)
  logs.process_argparse_options(options)
  ts_mon.process_argparse_options(options)

  # Renice is best-effort; a failed os.nice() is logged and ignored.
  if options.nice:
    logging.debug('Setting process "nice" to: %d', options.nice)
    try:
      os.nice(options.nice)
    except OSError as e:
      logging.error('Failed to update "nice" to %d: %s', options.nice, e)

  logging.info('Pulling master state from: %s', options.master_paths)
  for mpath in options.master_paths:
    mname = master_path_to_name(mpath)

    # Metrics go to the target buildbot/master/<mname>.
    target = ts_mon.TaskTarget(
        'buildbot/master',
        mname,
        options.ts_mon_task_region,
        options.ts_mon_task_hostname,
        options.ts_mon_task_number)
    logging.info('Collecting log state for master "%s" at: %s',
                 mname, mpath)
    get_master_state(mpath, target)

  logging.info('Flushing collected information.')
  ts_mon.flush()
  return 0
Esempio n. 14
0
def parse_args(args):  # pragma: no cover
    """Parse bugdroid command-line options.

    Args:
      args: list of command-line argument strings.

    Returns:
      A (opts, loop_opts) tuple: the parsed argparse namespace and the
      options consumed by outer_loop.
    """
    parser = argparse.ArgumentParser('./run.py %s' % __package__)
    parser.add_argument(
        '-c',
        '--configfile',
        help='Local JSON poller configuration file to override '
        'config file from luci-config.')
    parser.add_argument(
        '-d',
        '--credentials_db',
        help=
        'File to use for OAuth2 credentials storage if not running on LUCI.')
    parser.add_argument(
        '--datadir',
        default=DATADIR,
        help='Directory where persistent app data should be stored.')
    parser.add_argument(
        '--dryrun',
        action='store_true',
        help='Don\'t update monorail issues or update issues to the bugdroid '
        'appengine app.')

    # Shared infra option groups: logging, timeseries monitoring, outer loop.
    logs.add_argparse_options(parser)
    ts_mon.add_argparse_options(parser)
    outer_loop.add_argparse_options(parser)

    # Debug logging plus a fixed ts_mon task identity for bugdroid.
    parser.set_defaults(log_level=logging.DEBUG,
                        ts_mon_target_type='task',
                        ts_mon_task_service_name='bugdroid',
                        ts_mon_task_job_name='bugdroid_job')
    opts = parser.parse_args(args)

    logs.process_argparse_options(opts)
    ts_mon.process_argparse_options(opts)
    loop_opts = outer_loop.process_argparse_options(opts)

    # We need to include the logger ID (i.e. "%(name)s") in the formatter string.
    # Override the root logging handler set by infra_libs.logs.
    # NOTE(review): assumes logs.process_argparse_options installed at least
    # one root handler and that it accepts the custom %(severity)s /
    # %(iso8601)s / %(fullModuleName)s fields -- IndexError/KeyError otherwise.
    logging.root.handlers[0].setFormatter(
        logging.Formatter(
            '[%(severity)s%(iso8601)s %(process)d %(thread)d '
            '%(fullModuleName)s:%(lineno)s] (%(name)s) %(message)s'))

    return opts, loop_opts
Esempio n. 15
0
def get_arguments(argv):
  """Build the send_ts_mon_values argument parser and parse argv.

  Args:
    argv: command-line argument strings, excluding the program name.

  Returns:
    The parsed argparse namespace, with ts_mon_flush forced to 'manual'.
  """
  parser = argparse.ArgumentParser(
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description=textwrap.dedent("""
    CLI to send data via ts_mon from outside infra.git.
    Example invocation:

    run.py infra.tools.send_ts_mon_values \\
        --verbose
        --ts-mon-endpoint=file:///tmp/send_ts_mon_value.log \\
        --ts-mon-target-type task \\
        --ts-mon-task-service-name generic_system \\
        --ts-mon-task-job-name chromium \\
        --gauge='{"name":"task/m1", "value":18, "custom_field": "value"}' \\
        --float='{"name":"task/m2", "value":45}'

    The argument to a metric argument (like --gauge, --float) must be a json
    string. The names 'name', 'value' and 'start_time' are reserved because
    they represent the metric name, and value (start_time only for --counter and
    --cumulative). All the other keys are expected to be metric fields (with
    a maximum of seven).
    """))

  # One repeatable JSON-valued option per supported metric type.
  metrics_group = parser.add_argument_group('Metric types')
  metrics_group.add_argument('--gauge', metavar='JSON', action='append',
                             help="Send data for a gauge metric.")
  metrics_group.add_argument('--float', metavar='JSON', action='append',
                             help="Send data for a float metric.")
  metrics_group.add_argument('--string', metavar='JSON', action='append',
                             help="Send data for a string metric.")
  metrics_group.add_argument('--bool', '--boolean',
                             metavar='JSON', action='append',
                             help="Send data for a boolean metric.")
  metrics_group.add_argument('--counter', metavar='JSON', action='append',
                             help="Send data for a counter metric.")
  metrics_group.add_argument('--cumulative', metavar='JSON', action='append',
                             help="Send data for a cumulative metric.")

  infra_libs.logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)
  args = parser.parse_args(argv)
  # Forcing manual flush here for efficiency.
  args.ts_mon_flush = 'manual'
  return args
Esempio n. 16
0
def parse_args(args):  # pragma: no cover
  """Parse gsubtreed options.

  Returns:
    Options(repo, loop_opts, json_output).
  """
  def check_url(s):
    # Validate the URL before handing it to git2.Repo.
    pieces = urlparse.urlparse(s)
    if pieces.scheme not in ('https', 'git', 'file'):
      raise argparse.ArgumentTypeError(
          'Repo URL must use https, git or file protocol.')
    if not pieces.path.strip('/'):
      raise argparse.ArgumentTypeError('URL is missing a path?')
    return git2.Repo(s)

  parser = argparse.ArgumentParser('./run.py %s' % __package__)
  parser.add_argument('--dry_run', action='store_true',
                      help='Do not actually push anything.')
  parser.add_argument('--repo_dir', metavar='DIR', default='gsubtreed_repos',
                      help=('The directory to use for git clones '
                            '(default: %(default)s)'))
  parser.add_argument('--json_output', metavar='PATH',
                      help='Path to write JSON with results of the run to')
  parser.add_argument('repo', nargs=1, type=check_url,
                      help='The url of the repo to act on.')

  # Shared logging / monitoring / loop option groups.
  for module in (logs, ts_mon, outer_loop):
    module.add_argparse_options(parser)

  parser.set_defaults(
      ts_mon_target_type='task',
      ts_mon_task_service_name='gsubtreed',
  )

  opts = parser.parse_args(args)

  repo = opts.repo[0]
  repo.dry_run = opts.dry_run
  repo.repos_dir = os.path.abspath(opts.repo_dir)

  # Default the ts_mon job name to the repo URL's path component.
  if not opts.ts_mon_task_job_name:
    opts.ts_mon_task_job_name = urlparse.urlparse(repo.url).path

  logs.process_argparse_options(opts)
  ts_mon.process_argparse_options(opts)
  loop_opts = outer_loop.process_argparse_options(opts)

  return Options(repo, loop_opts, opts.json_output)
Esempio n. 17
0
def parse_args(args):
  """Parse cq_stats_uploader options and initialise logging and ts_mon."""
  parser = argparse.ArgumentParser('python -m %s' % __package__)
  parser.add_argument('--project', required=True)
  parser.add_argument('--range', required=True)

  # Shared logging and monitoring option groups.
  for module in (logs, ts_mon):
    module.add_argparse_options(parser)

  parser.set_defaults(
      logs_directory='',
      ts_mon_target_type='task',
      ts_mon_task_service_name='cq_stats_uploader',
  )

  options = parser.parse_args(args)

  # Default the job name to "<project>-<range>" when not given explicitly.
  if not options.ts_mon_task_job_name:
    options.ts_mon_task_job_name = '%s-%s' % (options.project, options.range)

  logs.process_argparse_options(options)
  ts_mon.process_argparse_options(options)

  return options
Esempio n. 18
0
def get_arguments(argv):
    """Build the send_ts_mon_values argument parser and parse argv.

    Args:
      argv: command-line argument strings, excluding the program name.

    Returns:
      The parsed argparse namespace, with ts_mon_flush forced to 'manual'.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=textwrap.dedent("""
    CLI to send data via ts_mon from outside infra.git.
    Example invocation:

    run.py infra.tools.send_ts_mon_values \\
        --verbose
        --ts-mon-endpoint=file:///tmp/send_ts_mon_value.log \\
        --ts-mon-target-type task \\
        --ts-mon-task-service-name generic_system \\
        --ts-mon-task-job-name chromium \\
        --gauge='{"name":"task/m1", "value":18, "custom_field": "value"}' \\
        --float='{"name":"task/m2", "value":45}' \\
        --counter='{"name":"task/count", "start_time": 149523409, \\
                    "value": 42}'

    The argument to a metric argument (like --gauge, --float) must be a json
    string. The names 'name', 'value' and 'start_time' are reserved because
    they represent the metric name, and value (start_time only for --counter and
    --cumulative, in seconds since UNIX epoch). All the other keys are
    expected to be metric fields (with a maximum of seven).

    Note, that all points in the same metric must have the same set of metric
    fields. This must be true for all instances of the metric globally,
    otherwise the metric will be rejected by the ts_mon endpoint.

    Also, all cumulative metric points must have the same start_time value.
    """))

    # Every metric type gets two repeatable options: an inline JSON form and
    # a --<type>-file form reading one JSON entry per line.
    metrics_group = parser.add_argument_group('Metric types')
    metrics_group.add_argument('--gauge',
                               metavar='JSON',
                               action='append',
                               help="Send data for a gauge metric. The json "
                               "string can be base64-encoded.")
    metrics_group.add_argument(
        '--gauge-file',
        metavar='PATH',
        action='append',
        help="Same as --gauge but read json from a file, "
        "one entry per line.")

    metrics_group.add_argument('--float',
                               metavar='JSON',
                               action='append',
                               help="Send data for a float metric.")
    metrics_group.add_argument(
        '--float-file',
        metavar='PATH',
        action='append',
        help="Same as --float but read json from a file, "
        "one entry per line.")

    metrics_group.add_argument('--string',
                               metavar='JSON',
                               action='append',
                               help="Send data for a string metric. The json "
                               "string can be base64-encoded.")
    metrics_group.add_argument(
        '--string-file',
        metavar='PATH',
        action='append',
        help="Same as --string but read json from a file, "
        "one entry per line.")

    metrics_group.add_argument('--bool',
                               '--boolean',
                               metavar='JSON',
                               action='append',
                               help="Send data for a boolean metric. The json "
                               "string can be base64-encoded")
    metrics_group.add_argument(
        '--bool-file',
        metavar='PATH',
        action='append',
        help="Same as --bool but read json from a file, "
        "one entry per line.")

    metrics_group.add_argument('--counter',
                               metavar='JSON',
                               action='append',
                               help="Send data for a counter metric.")
    metrics_group.add_argument(
        '--counter-file',
        metavar='PATH',
        action='append',
        help="Same as --counter but read json from a file,"
        " one entry per line.")

    metrics_group.add_argument(
        '--cumulative',
        metavar='JSON',
        action='append',
        help="Send data for a cumulative metric. The json "
        "string can be base64 encoded")
    metrics_group.add_argument(
        '--cumulative-file',
        metavar='PATH',
        action='append',
        help="Same as --cumulative but read json from a "
        "file, one entry per line.")

    infra_libs.logs.add_argparse_options(parser)
    ts_mon.add_argparse_options(parser)
    args = parser.parse_args(argv)
    # Forcing manual flush here for efficiency.
    args.ts_mon_flush = 'manual'
    return args
Esempio n. 19
0
def parse_args():
    """Parse command-line options for the master_manager launcher.

    Returns:
      The parsed argparse namespace.  Exits via parser.error() on invalid
      option combinations.
    """
    parser = argparse.ArgumentParser(
        description='Launches master_manager for every master on a host. NOTE: '
        'does not perform any action unless --prod is set.')

    parser.add_argument('build_dir',
                        nargs='?',
                        help='location of the tools/build directory')
    parser.add_argument('--hostname',
                        default=socket.getfqdn(),
                        help='override local hostname (currently %(default)s)')
    parser.add_argument('--json-file',
                        help='load desired master state from a file on disk')
    parser.add_argument(
        '--json-gitiles',
        help='load desired master state from a gitiles location')
    parser.add_argument(
        '--netrc',
        help='location of the netrc file when connecting to gitiles')
    parser.add_argument(
        '--command-timeout',
        help='apply a timeout in seconds to each master_manager process')
    parser.add_argument(
        '--verify',
        action='store_true',
        help='verify the desired master state JSON is valid, then exit')
    parser.add_argument(
        '--prod',
        action='store_true',
        help='actually perform actions instead of doing a dry run')
    parser.add_argument(
        '--processes',
        default=16,
        type=int,
        help='maximum number of master_manager processes to run simultaneously '
        '(default %(default)d)')

    ts_mon.add_argparse_options(parser)
    logs.add_argparse_options(parser)

    parser.set_defaults(
        ts_mon_target_type='task',
        ts_mon_task_job_name=socket.getfqdn().split(".")[0],
        ts_mon_task_service_name='master_manager_launcher',
        # Fix: the ts_mon flush option is stored as 'ts_mon_flush' (see the
        # other parsers in this file); the old 'ts_mon_flush_mode' default
        # only set an unused attribute, so manual flushing was never
        # configured.  The old attribute is kept in case anything reads it.
        ts_mon_flush='manual',
        ts_mon_flush_mode='manual',
    )

    args = parser.parse_args()
    ts_mon.process_argparse_options(args)
    logs.process_argparse_options(args)

    # Exactly one of --json-file / --json-gitiles must be supplied.
    if args.json_file and args.json_gitiles:
        parser.error(
            "Can't specify --json-file and --json-gitiles simultaneously")

    if not args.json_gitiles and not args.json_file:
        parser.error('Must specify either --json-gitiles or --json-file.')

    # build_dir is only optional in --verify mode.
    if not args.verify:
        if not args.build_dir:
            parser.error('A build/ directory must be specified.')

    return args
Esempio n. 20
0
def get_arguments(argv):
  """Process command-line arguments.

  Args:
    argv (list of strings): sys.argv[1:]
  Returns:
    args (argparse.Namespace): processed command-line arguments
  """
  parser = argparse.ArgumentParser(
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description=textwrap.dedent("""
    CLI to send data via ts_mon from outside infra.git.
    Example invocation:

    run.py infra.tools.send_ts_mon_values \\
        --verbose \\
        --ts-mon-endpoint=file:///tmp/send_ts_mon_value.log \\
        --ts-mon-target-type task \\
        --ts-mon-task-service-name generic_system \\
        --ts-mon-task-job-name chromium \\
        --gauge='{"name":"task/m1", "value":18, "custom_field": "value"}' \\
        --float='{"name":"task/m2", "value":45}' \\
        --counter='{"name":"task/count", "start_time": 149523409, \\
                    "value": 42}'

    The argument to a metric argument (like --gauge, --float) must be a json
    string. The names 'name', 'value' and 'start_time' are reserved because
    they represent the metric name, and value (start_time only for --counter and
    --cumulative, in seconds since UNIX epoch). All the other keys are
    expected to be metric fields (with a maximum of seven).

    Note, that all points in the same metric must have the same set of metric
    fields. This must be true for all instances of the metric globally,
    otherwise the metric will be rejected by the ts_mon endpoint.

    Also, all cumulative metric points must have the same start_time value.
    """))

  # One inline --<type> flag and one --<type>-file flag per metric type.
  metrics_group = parser.add_argument_group('Metric types')
  metrics_group.add_argument('--gauge', metavar='JSON', action='append',
                             help="Send data for a gauge metric. The json "
                             "string can be base64-encoded.")
  metrics_group.add_argument('--gauge-file', metavar='PATH', action='append',
                             help="Same as --gauge but read json from a file, "
                             "one entry per line.")

  metrics_group.add_argument('--float', metavar='JSON', action='append',
                             help="Send data for a float metric.")
  metrics_group.add_argument('--float-file', metavar='PATH', action='append',
                             help="Same as --float but read json from a file, "
                             "one entry per line.")

  metrics_group.add_argument('--string', metavar='JSON', action='append',
                             help="Send data for a string metric. The json "
                             "string can be base64-encoded.")
  metrics_group.add_argument('--string-file', metavar='PATH', action='append',
                             help="Same as --string but read json from a file, "
                             "one entry per line.")

  metrics_group.add_argument('--bool', '--boolean',
                             metavar='JSON', action='append',
                             help="Send data for a boolean metric. The json "
                             "string can be base64-encoded")
  metrics_group.add_argument('--bool-file', metavar='PATH', action='append',
                             help="Same as --bool but read json from a file, "
                             "one entry per line.")

  metrics_group.add_argument('--counter', metavar='JSON', action='append',
                             help="Send data for a counter metric.")
  metrics_group.add_argument('--counter-file', metavar='PATH', action='append',
                             help="Same as --counter but read json from a file,"
                             " one entry per line.")

  metrics_group.add_argument('--cumulative', metavar='JSON', action='append',
                             help="Send data for a cumulative metric. The json "
                             "string can be base64 encoded")
  metrics_group.add_argument('--cumulative-file', metavar='PATH',
                             action='append',
                             help="Same as --cumulative but read json from a "
                             "file, one entry per line.")

  infra_libs.logs.add_argparse_options(parser)
  ts_mon.add_argparse_options(parser)
  args = parser.parse_args(argv)
  # Forcing manual flush here for efficiency.
  args.ts_mon_flush = 'manual'
  return args
Esempio n. 21
0
def get_arguments(argv):
  """Process command-line arguments.

  Args:
    argv (list of strings): sys.argv[1:]
  Returns:
    args (argparse.Namespace): processed command-line arguments
  """
  # This function must be testable. Put non-testable side-effects
  # in main().

  parser = argparse.ArgumentParser(
    description="""Send an event to the monitoring pipeline.

    Examples:
    run.py infra.tools.send_monitoring_event --service-event-type=START \\
                                     --service-event-revinfo <filename>

    run.py infra.tools.send_monitoring_event \\
                                     --service-event-stack-trace "<stack trace>"

    run.py infra.tools.send_monitoring_event --build-event-type=SCHEDULER \\
                                     --build-event-build-name=foo
                                     --build-event-hostname='bot.dns.name'
    """, formatter_class=argparse.RawTextHelpFormatter)

  # Common fields
  common_group = parser.add_argument_group('Common event options')
  common_group.add_argument('--event-mon-timestamp-kind',
                            choices=event_mon.TIMESTAMP_KINDS,
                            help='General kind of event. This value is used '
                            'e.g. to\nautomatically compute durations between '
                            'START and STOP\nevents.')
  common_group.add_argument('--event-mon-event-timestamp', type=int,
                            help='Timestamp when the event was generated, as '
                            'number of\nmilliseconds since the Unix EPOCH.'
                            '\nDefaults to current time.')

  # Service event
  service_group = parser.add_argument_group('Service event options')
  type_group = service_group.add_mutually_exclusive_group()
  type_group.add_argument('--service-event-type',
                          choices=event_mon.EVENT_TYPES,
                          help='Kind of event to send.')

  type_group.add_argument('--service-event-stack-trace',
                          metavar='STACK_TRACE',
                          help='String containing a stack trace. Sets the event'
                          ' type\nto "CRASH" automatically.')

  revinfo = service_group.add_mutually_exclusive_group()
  revinfo.add_argument('--service-event-revinfo',
                       metavar='FILENAME',
                       help='File to read revision information from, "-" means'
                       '\nstandard input. The file'
                       ' is supposed to contain the\noutput of'
                       ' "gclient revinfo -a".')
  revinfo.add_argument('--service-event-revinfo-from-gclient',
                       action='store_true',
                       help='Calls gclient to get revision information. '
                       '\nMutually exclusive with --service-event-revinfo')

  # Build events
  build_group = parser.add_argument_group('Build event options')
  build_group.add_argument('--build-event-type',
                           choices=event_mon.BUILD_EVENT_TYPES,
                           help='Type of the build event.')
  build_group.add_argument('--build-event-hostname',
                           metavar='HOSTNAME',
                           help='Hostname of the bot running the build.')
  build_group.add_argument('--build-event-build-name',
                           metavar='BUILDER_NAME',
                           help='Builder name as known to Buildbot.')
  build_group.add_argument('--build-event-build-number',
                           type=int,
                           metavar='BUILD_NUMBER',
                           help='Build number as known to Buildbot')
  build_group.add_argument('--build-event-build-scheduling-time',
                           type=int,
                           metavar='TIMESTAMP',
                           help='Timestamp (in milliseconds since the epoch),'
                           ' when the\nbuild was scheduled. Used to tell '
                           'apart builds with\n identical build numbers.')
  build_group.add_argument('--build-event-step-name',
                           metavar='STEP_NAME',
                           help='Step name as known to Buildbot.')
  build_group.add_argument('--build-event-step-number',
                           type=int,
                           metavar='BUILD_NUMBER',
                           help='Step number inside the build. Zero-based.')
  build_group.add_argument('--build-event-result',
                           choices=event_mon.BUILD_RESULTS,
                           help='Result of build or step depending on '
                           'whether any \n--build-event-step-* options have '
                           'been provided or not.')

  build_group.add_argument('--build-event-extra-result-code',
                           help='Extra result code. String, comma-separated '
                           'list of strings or json-encoded list of string. '
                           'Each one must be less than 20 characters long.')
  build_group.add_argument('--build-event-patch-url',
                           help='URL of the patchset that triggered build')

  build_group.add_argument('--build-event-goma-stats-path',
                           metavar='FILENAME',
                           help='File containing a serialized GomaStats '
                           'protobuf.')
  build_group.add_argument('--build-event-goma-error',
                           choices=event_mon.GOMA_ERROR_TYPES,
                           help='Reason for no GomaStats protobuf.')
  build_group.add_argument('--build-event-goma-crash-report-id-path',
                           metavar='FILENAME',
                           help='File containing a crash report id.')

  # Read events from file
  file_group = parser.add_argument_group('Read events from file')
  file_group.add_argument('--events-from-file',
                          metavar='FILENAME', nargs='*',
                          help='File containing events as json dict. This '
                          'option\nis incompatible with --build-event-type and'
                          '\n--service-event-type.\nSee '
                          'send_event.read_events_from_file for details\n'
                          'on the format. This option can be passed multiple\n'
                          'times, and wildcards can be used.')
  file_group.add_argument('--delete-file-when-sent',
                          action='store_true', default=False,
                          help='If all events read from a file have been '
                          'successfully\nsent to the endpoint, delete the '
                          'file. By default\nfiles are kept. This does not '
                          'affect the file pointed to by '
                          '--event-logrequest-path')

  file_group.add_argument('--event-logrequest-path',
                          metavar='FILENAME',
                          help='File containing a serialized LogRequestLite'
                          'proto, containing a single ChromeInfraEvent that '
                          'will be used as the default event. Such a file can '
                          'be generated by passing "file" to '
                          '--event-mon-run-type.')

  ts_mon.add_argparse_options(parser)
  event_mon.add_argparse_options(parser)
  infra_libs.logs.add_argparse_options(parser)

  parser.set_defaults(
      ts_mon_flush='manual',
      ts_mon_target_type='task',
      ts_mon_task_service_name='send_monitoring_event',
      ts_mon_task_job_name='manual',
  )

  args = parser.parse_args(argv)

  # Providing a stack trace implies a CRASH event.
  if args.service_event_stack_trace:
    args.service_event_type = 'CRASH'

  # The three event sources are mutually exclusive.
  if args.build_event_type and args.service_event_type:
    parser.error('Only one type of event can be sent at once. '
                 'Got both --build-event-type and --service-event-type.')
  if ((args.build_event_type and args.events_from_file)
      or (args.service_event_type and args.events_from_file)):
    parser.error('--events-from-file is not compatible with either '
                 '--service-event-type or --build-event-type.')

  # Convert extra_result_code to a list when needed.
  if args.build_event_extra_result_code:
    extra_result_code = args.build_event_extra_result_code.strip()
    if extra_result_code.startswith('['):
      extra_result_code = json.loads(extra_result_code)
    elif ',' in extra_result_code:
      extra_result_code = extra_result_code.split(',')
    args.build_event_extra_result_code = extra_result_code

  return args
Esempio n. 22
0
def get_arguments(argv):
    """Process command-line arguments.

  Args:
    argv (list of strings): sys.argv[1:]
  Returns:
    args (argparse.Namespace): processed command-line arguments
  """
    # This function must be testable. Put non-testable side-effects
    # in main().

    parser = argparse.ArgumentParser(
        description="""Send an event to the monitoring pipeline.

    Examples:
    run.py infra.tools.send_monitoring_event --service-event-type=START \\
                                     --service-event-revinfo <filename>

    run.py infra.tools.send_monitoring_event \\
                                     --service-event-stack-trace "<stack trace>"

    run.py infra.tools.send_monitoring_event --build-event-type=SCHEDULER \\
                                     --build-event-build-name=foo
                                     --build-event-hostname='bot.dns.name'
    """,
        formatter_class=argparse.RawTextHelpFormatter)

    # Common fields
    common_group = parser.add_argument_group('Common event options')
    common_group.add_argument(
        '--event-mon-timestamp-kind',
        choices=[kind for kind in event_mon.TIMESTAMP_KINDS if kind],
        default='POINT',
        help='General kind of event. This value is used '
        'e.g. to\nautomatically compute durations between '
        'START and STOP\nevents. Default: %(default)s')
    common_group.add_argument(
        '--event-mon-event-timestamp',
        type=int,
        help='Timestamp when the event was generated, as '
        'number of\nmilliseconds since the Unix EPOCH.'
        '\nDefaults to current time.')

    # Service event
    service_group = parser.add_argument_group('Service event options')
    type_group = service_group.add_mutually_exclusive_group()
    type_group.add_argument('--service-event-type',
                            choices=event_mon.EVENT_TYPES,
                            help='Kind of event to send.')

    type_group.add_argument(
        '--service-event-stack-trace',
        metavar='STACK_TRACE',
        help='String containing a stack trace. Sets the event'
        ' type\nto "CRASH" automatically.')

    revinfo = service_group.add_mutually_exclusive_group()
    revinfo.add_argument(
        '--service-event-revinfo',
        metavar='FILENAME',
        help='File to read revision information from, "-" means'
        '\nstandard input. The file'
        ' is supposed to contain the\noutput of'
        ' "gclient revinfo -a".')
    revinfo.add_argument('--service-event-revinfo-from-gclient',
                         action='store_true',
                         help='Calls gclient to get revision information. '
                         '\nMutually exclusive with --service-event-revinfo')

    # Build events
    build_group = parser.add_argument_group('Build event options')
    build_group.add_argument('--build-event-type',
                             choices=event_mon.BUILD_EVENT_TYPES,
                             help='Type of the build event.')
    build_group.add_argument('--build-event-hostname',
                             metavar='HOSTNAME',
                             help='Hostname of the bot running the build.')
    build_group.add_argument('--build-event-build-name',
                             metavar='BUILDER_NAME',
                             help='Builder name as known to Buildbot.')
    build_group.add_argument('--build-event-build-number',
                             type=int,
                             metavar='BUILD_NUMBER',
                             help='Build number as known to Buildbot')
    build_group.add_argument(
        '--build-event-build-scheduling-time',
        type=int,
        metavar='TIMESTAMP',
        help='Timestamp (in milliseconds since the epoch),'
        ' when the\nbuild was scheduled. Used to tell '
        'apart builds with\n identical build numbers.')
    build_group.add_argument('--build-event-step-name',
                             metavar='STEP_NAME',
                             help='Step name as known to Buildbot.')
    build_group.add_argument('--build-event-step-number',
                             type=int,
                             metavar='BUILD_NUMBER',
                             help='Step number inside the build. Zero-based.')
    build_group.add_argument('--build-event-result',
                             choices=event_mon.BUILD_RESULTS,
                             help='Result of build or step depending on '
                             'whether any \n--build-event-step-* options have '
                             'been provided or not.')

    # Read events from file
    file_group = parser.add_argument_group('Read events from file')
    file_group.add_argument(
        '--events-from-file',
        metavar='FILENAME',
        nargs='*',
        help='File containing events as json dict. This '
        'option\nis incompatible with --build-event-type and'
        '\n--service-event-type.\nSee '
        'send_event.read_events_from_file for details\n'
        'on the format. This option can be passed multiple\n'
        'times, and wildcards can be used.')
    file_group.add_argument('--delete-file-when-sent',
                            action='store_true',
                            default=False,
                            help='If all events read from a file have been '
                            'successfully\nsent to the endpoint, delete the '
                            'file. By default\nfiles are kept.')

    ts_mon.add_argparse_options(parser)
    event_mon.add_argparse_options(parser)
    infra_libs.logs.add_argparse_options(parser)

    parser.set_defaults(
        ts_mon_flush='manual',
        ts_mon_target_type='task',
        ts_mon_task_service_name='send_monitoring_event',
        ts_mon_task_job_name='manual',
    )

    args = parser.parse_args(argv)

    # Providing a stack trace implies a CRASH event.
    if args.service_event_stack_trace:
        args.service_event_type = 'CRASH'

    # The three event sources are mutually exclusive.
    if args.build_event_type and args.service_event_type:
        parser.error('Only one type of event can be sent at once. '
                     'Got both --build-event-type and --service-event-type.')
    if ((args.build_event_type and args.events_from_file)
            or (args.service_event_type and args.events_from_file)):
        parser.error('--events-from-file is not compatible with either '
                     '--service-event-type or --build-event-type.')
    return args
Esempio n. 23
0
def get_arguments(argv):
    """Process command-line arguments.

  Args:
    argv (list of strings): sys.argv[1:]
  Returns:
    args (argparse.Namespace): processed command-line arguments
  """
    # This function must be testable. Put non-testable side-effects
    # in main().

    parser = argparse.ArgumentParser(
        description="""Send an event to the monitoring pipeline.

    Examples:
    run.py infra.tools.send_monitoring_event --service-event-type=START \\
                                     --service-event-revinfo <filename>

    run.py infra.tools.send_monitoring_event \\
                                     --service-event-stack-trace "<stack trace>"

    run.py infra.tools.send_monitoring_event --build-event-type=SCHEDULER \\
                                     --build-event-build-name=foo
                                     --build-event-hostname='bot.dns.name'
    """,
        formatter_class=argparse.RawTextHelpFormatter,
    )

    # Common fields
    common_group = parser.add_argument_group("Common event options")
    common_group.add_argument(
        "--event-mon-timestamp-kind",
        choices=[kind for kind in event_mon.TIMESTAMP_KINDS if kind],
        default="POINT",
        help="General kind of event. This value is used "
        "e.g. to\nautomatically compute durations between "
        "START and STOP\nevents. Default: %(default)s",
    )
    common_group.add_argument(
        "--event-mon-event-timestamp",
        type=int,
        help="Timestamp when the event was generated, as "
        "number of\nmilliseconds since the Unix EPOCH."
        "\nDefaults to current time.",
    )

    # Service event
    service_group = parser.add_argument_group("Service event options")
    type_group = service_group.add_mutually_exclusive_group()
    type_group.add_argument("--service-event-type", choices=event_mon.EVENT_TYPES, help="Kind of event to send.")

    type_group.add_argument(
        "--service-event-stack-trace",
        metavar="STACK_TRACE",
        help="String containing a stack trace. Sets the event" ' type\nto "CRASH" automatically.',
    )

    revinfo = service_group.add_mutually_exclusive_group()
    revinfo.add_argument(
        "--service-event-revinfo",
        metavar="FILENAME",
        help='File to read revision information from, "-" means'
        "\nstandard input. The file"
        " is supposed to contain the\noutput of"
        ' "gclient revinfo -a".',
    )
    revinfo.add_argument(
        "--service-event-revinfo-from-gclient",
        action="store_true",
        help="Calls gclient to get revision information. " "\nMutually exclusive with --service-event-revinfo",
    )

    # Build events
    build_group = parser.add_argument_group("Build event options")
    build_group.add_argument("--build-event-type", choices=event_mon.BUILD_EVENT_TYPES, help="Type of the build event.")
    build_group.add_argument(
        "--build-event-hostname", metavar="HOSTNAME", help="Hostname of the bot running the build."
    )
    build_group.add_argument(
        "--build-event-build-name", metavar="BUILDER_NAME", help="Builder name as known to Buildbot."
    )
    build_group.add_argument(
        "--build-event-build-number", type=int, metavar="BUILD_NUMBER", help="Build number as known to Buildbot"
    )
    build_group.add_argument(
        "--build-event-build-scheduling-time",
        type=int,
        metavar="TIMESTAMP",
        help="Timestamp (in milliseconds since the epoch),"
        " when the\nbuild was scheduled. Used to tell "
        "apart builds with\n identical build numbers.",
    )
    build_group.add_argument("--build-event-step-name", metavar="STEP_NAME", help="Step name as known to Buildbot.")
    build_group.add_argument(
        "--build-event-step-number", type=int, metavar="BUILD_NUMBER", help="Step number inside the build. Zero-based."
    )
    build_group.add_argument(
        "--build-event-result",
        choices=event_mon.BUILD_RESULTS,
        help="Result of build or step depending on "
        "whether any \n--build-event-step-* options have "
        "been provided or not.",
    )

    # Read events from file
    file_group = parser.add_argument_group("Read events from file")
    file_group.add_argument(
        "--events-from-file",
        metavar="FILENAME",
        nargs="*",
        help="File containing events as json dict. This "
        "option\nis incompatible with --build-event-type and"
        "\n--service-event-type.\nSee "
        "send_event.read_events_from_file for details\n"
        "on the format. This option can be passed multiple\n"
        "times, and wildcards can be used.",
    )
    file_group.add_argument(
        "--delete-file-when-sent",
        action="store_true",
        default=False,
        help="If all events read from a file have been "
        "successfully\nsent to the endpoint, delete the "
        "file. By default\nfiles are kept.",
    )

    ts_mon.add_argparse_options(parser)
    event_mon.add_argparse_options(parser)
    infra_libs.logs.add_argparse_options(parser)

    parser.set_defaults(
        ts_mon_flush="manual",
        ts_mon_target_type="task",
        ts_mon_task_service_name="send_monitoring_event",
        ts_mon_task_job_name="manual",
    )

    args = parser.parse_args(argv)

    # Providing a stack trace implies a CRASH event.
    if args.service_event_stack_trace:
        args.service_event_type = "CRASH"

    # The three event sources are mutually exclusive.
    if args.build_event_type and args.service_event_type:
        parser.error(
            "Only one type of event can be sent at once. " "Got both --build-event-type and --service-event-type."
        )
    if (args.build_event_type and args.events_from_file) or (args.service_event_type and args.events_from_file):
        parser.error("--events-from-file is not compatible with either " "--service-event-type or --build-event-type.")
    return args