Example #1
def checkpointAndCheckStatus(checkpoint_file, apache_logs, config):
    """Checkpoint apache_logs into file and return true if still is a master."""
    if not liblog.makeValid(checkpoint_file, apache_logs):
        logging.error('Error writing checkpoint %s' % checkpoint_file)
        return 0
    if not isMaster(config):
        logging.error('I am not a master. Terminating')
        return 0
    return 1
Example #2
def checkpointAndCheckStatus(checkpoint_file, apache_logs, config):
  """Checkpoint apache_logs into file and return true if still is a master."""
  if not liblog.makeValid(checkpoint_file, apache_logs):
    logging.error('Error writing checkpoint %s' % checkpoint_file)
    return 0
  if not isMaster(config):
    logging.error('I am not a master. Terminating')
    return 0
  return 1
Example #3
def CreateLogReport(config, date_str, logs, main_google3_dir,
                    withResults, topCount, diagnosticTerms,
                    html_file, valid_file,
                    new_html_file, new_valid_file):
  """This method generate an aggregate report on search queries over a
  period of days."""

  logging.info('Creating log report for %s' % date_str)

  # see if the report is already valid
  if (liblog.checkValid(html_file, valid_file, logs) and
      gfile.Exists(html_file)):
    logging.info('%s is already valid' % html_file)
    return liblog.STILL_VALID

  # build the list of args
  args = [date_str, new_html_file, withResults, topCount,
          diagnosticTerms]
  args.extend(map(lambda x: x.file, logs))

  arg_str = string.join(map(commands.mkarg, args))

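  # run enterprise_stats.py from the analyzelogs scripts directory,
  # passing the shell-quoted argument string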
  stats_cmd = ('cd %s/enterprise/legacy/analyzelogs/scripts; '
               './enterprise_stats.py %s' % (main_google3_dir, arg_str))

  (status, output) = liblog.DoCommand(stats_cmd)

  if status != 0:
    logging.error('Error running enterprise_stats: %s' % output)
    return liblog.FAILURE

  # make valid file
  if not liblog.makeValid(new_valid_file, logs):
    logging.error('Error making valid file %s' % new_valid_file)
    return liblog.FAILURE

  logging.info('Done log_report for %s' % new_html_file)
  return liblog.SUCCESS
Example #4
def CreateLogReport(config, date_str, logs, main_google3_dir, withResults,
                    topCount, diagnosticTerms, html_file, valid_file,
                    new_html_file, new_valid_file):
    """This method generate an aggregate report on search queries over a
  period of days."""

    logging.info('Creating log report for %s' % date_str)

    # see if the report is already valid
    if (liblog.checkValid(html_file, valid_file, logs)
            and gfile.Exists(html_file)):
        logging.info('%s is already valid' % html_file)
        return liblog.STILL_VALID

    # build the list of args
    args = [date_str, new_html_file, withResults, topCount, diagnosticTerms]
    args.extend(map(lambda x: x.file, logs))

    arg_str = string.join(map(commands.mkarg, args))

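    # run enterprise_stats.py from the analyzelogs scripts directory,
    # passing the shell-quoted argument string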
    stats_cmd = ('cd %s/enterprise/legacy/analyzelogs/scripts; '
                 './enterprise_stats.py %s' % (main_google3_dir, arg_str))

    (status, output) = liblog.DoCommand(stats_cmd)

    if status != 0:
        logging.error('Error running enterprise_stats: %s' % output)
        return liblog.FAILURE

    # make valid file
    if not liblog.makeValid(new_valid_file, logs):
        logging.error('Error making valid file %s' % new_valid_file)
        return liblog.FAILURE

    logging.info('Done log_report for %s' % new_html_file)
    return liblog.SUCCESS
Example #5
def main(argv):
  argc = len(argv)

  if argc < 6:
    sys.exit(__doc__)

  config = entconfig.EntConfig(argv[0])
  if not config.Load():
    sys.exit(__doc__)

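  # initialize the Google script environment (GFS aliases, BNS resolver,
  # logging to stderr) before touching any files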
  pywrapbase.InitGoogleScript('', ['foo',
          '--gfs_aliases=%s' % config.var("GFS_ALIASES"),
          '--bnsresolver_use_svelte=false',
          '--logtostderr'], 0)
  gfile.Init()

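  # positional arguments: client tag, date spec, existing report and valid
  # files, and the new valid file to write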
  client = argv[1]
  date_arg = argv[2]
  html_file = argv[3]
  valid_file = argv[4]
  new_valid_file = argv[5]

  # extract tag and date_range from command line args
  date_fields = string.split(date_arg, '_')
  date_range = liblog.ParseDateRange(date_fields[0], date_fields[1:])

  if not date_range:
    sys.exit(__doc__)

  first_date, last_date, printable_date, file_date = date_range

  if last_date.as_int() < first_date.as_int():
    sys.exit(__doc__)

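  # resolve the log, click, collect and apache directories plus the
  # directory map file from the config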
  gws_log_dir = liblog.get_gws_log_dir(config)
  click_dir = liblog.get_click_dir(config)
  collect_dir = liblog.get_collect_dir(config)
  apache_dir = liblog.get_apache_dir(config)
  directory_map_file = liblog.get_directory_map_file(config)

  # collect logs from all gws nodes and preprocess them first to make
  # sure the logs are up to date.
  all_machines = config.var('MACHINES')
  collect_logs.CollectLogs(all_machines, gws_log_dir, collect_dir)
  preprocess_logs.PartitionLogs(config)

  # make a vector of Log objects for all apache_logs and click_logs matching
  # the given date range and client.
  apache_logs = liblog.FindClientLogFiles(apache_dir, directory_map_file,
                                          client, first_date, last_date)
  click_logs = liblog.FindClientLogFiles(click_dir, directory_map_file,
                                          client, first_date, last_date)

  # If we already have a report file and a valid file, check whether the data
  # in apache_dir has changed and whether the report is still valid.
  if (gfile.Exists(html_file) and gfile.Exists(valid_file) and
      liblog.checkValid(html_file, valid_file, apache_logs)):
    logging.info('%s still valid.' % html_file)
    sys.exit(liblog.STILL_VALID)

  # if there is no valid report, we create a new one
  DumpApacheAndClickLogs(apache_logs, click_logs)
  if not liblog.makeValid(new_valid_file, apache_logs):
    logging.error('Error validating %s' % new_valid_file)
    sys.exit(liblog.FAILURE)

  logging.info('done apache_log, new_valid_file: %s' % new_valid_file)
  sys.exit(liblog.SUCCESS)
Example #6
def main(argv):
    argc = len(argv)

    if argc < 6:
        sys.exit(__doc__)

    config = entconfig.EntConfig(argv[0])
    if not config.Load():
        sys.exit(__doc__)

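    # initialize the Google script environment (GFS aliases, BNS resolver,
    # logging to stderr) before touching any files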
    pywrapbase.InitGoogleScript('', [
        'foo',
        '--gfs_aliases=%s' % config.var("GFS_ALIASES"),
        '--bnsresolver_use_svelte=false', '--logtostderr'
    ], 0)
    gfile.Init()

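    # positional arguments: client tag, date spec, existing report and valid
    # files, and the new valid file to write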
    client = argv[1]
    date_arg = argv[2]
    html_file = argv[3]
    valid_file = argv[4]
    new_valid_file = argv[5]

    # extract tag and date_range from command line args
    date_fields = string.split(date_arg, '_')
    date_range = liblog.ParseDateRange(date_fields[0], date_fields[1:])

    if not date_range:
        sys.exit(__doc__)

    first_date, last_date, printable_date, file_date = date_range

    if last_date.as_int() < first_date.as_int():
        sys.exit(__doc__)

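    # resolve the log, click, collect and apache directories plus the
    # directory map file from the config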
    gws_log_dir = liblog.get_gws_log_dir(config)
    click_dir = liblog.get_click_dir(config)
    collect_dir = liblog.get_collect_dir(config)
    apache_dir = liblog.get_apache_dir(config)
    directory_map_file = liblog.get_directory_map_file(config)

    # collect logs from all gws nodes and preprocess them first to make
    # sure the logs are up to date.
    all_machines = config.var('MACHINES')
    collect_logs.CollectLogs(all_machines, gws_log_dir, collect_dir)
    preprocess_logs.PartitionLogs(config)

    # make a vector of Log objects for all apache_logs and click_logs matching
    # the given date range and client.
    apache_logs = liblog.FindClientLogFiles(apache_dir, directory_map_file,
                                            client, first_date, last_date)
    click_logs = liblog.FindClientLogFiles(click_dir, directory_map_file,
                                           client, first_date, last_date)

    # If we already have a report file and a valid file, check whether the data
    # in apache_dir has changed and whether the report is still valid.
    if (gfile.Exists(html_file) and gfile.Exists(valid_file)
            and liblog.checkValid(html_file, valid_file, apache_logs)):
        logging.info('%s still valid.' % html_file)
        sys.exit(liblog.STILL_VALID)

    # if there is no valid report, we create a new one
    DumpApacheAndClickLogs(apache_logs, click_logs)
    if not liblog.makeValid(new_valid_file, apache_logs):
        logging.error('Error validating %s' % new_valid_file)
        sys.exit(liblog.FAILURE)

    logging.info('done apache_log, new_valid_file: %s' % new_valid_file)
    sys.exit(liblog.SUCCESS)