Example #1
 def test_empty_config(self):
     config = PbenchConfig(_config_path_prefix / "pbench.cfg")
     assert config.TZ == "UTC", f"Unexpected TZ value, {config.TZ!r}"
     assert (
         config.log_fmt is None
     ), f"Unexpected log format value, {config.log_fmt!r}"
     assert (
         config.default_logging_level == "INFO"
     ), f"Unexpected default logging level, {config.default_logging_level!r}"
     assert (
         config.log_using_caller_directory is False
     ), f"Unexpected 'log using caller directory' boolean, {config.log_using_caller_directory!r}"
     assert config.log_dir is None, f"Unexpected log directory, {config.log_dir!r}"
     assert (
         config.logger_type == "devlog"
     ), f"Unexpected logger type, {config.logger_type!r}"
     with pytest.raises(AttributeError):
         print(f"{config.logger_host!r}")
     with pytest.raises(AttributeError):
         print(f"{config.logger_port!r}")
     assert "42" == config.get(
         "other",
         "foobar"), "Failed to fetch 'foobar' from 'DEFAULT' section"
     assert "43" == config.get(
         "other", "barfoo"), "Failed to fetch 'barfoo' from 'other' section"
     assert isinstance(
         config.files, list
     ), f"Unexpected object class for 'files', {config.files.__class__!r}"
Example #2
 def test_logger_type_provided(self):
     config = PbenchConfig(_config_path_prefix / "hostport.cfg")
     assert (
         config.logger_type == "hostport"
     ), f"Unexpected logger type, {config.logger_type!r}"
     assert (
         config.logger_host == "logger.example.com"
     ), f"Unexpected logger host value, {config.logger_host!r}"
     assert (
         config.logger_port == "42"
     ), f"Unexpected logger port value, {config.logger_port!r}"
Example #3
 def config(self):
     # Set up the configuration
     config_prefix_path = Path("lib/pbench/test/unit/common/config/")
     self.config = PbenchConfig(config_prefix_path / "pbench.cfg")
     self.logger = None
     yield
     # Tear down the setup
     self.config = None
     self.logger = None
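This is a yield-style setup/teardown method: everything before the yield runs before each test, everything after it runs once the test finishes. The decorator is not shown in the excerpt; a minimal sketch of how such a fixture is usually wired up (the decorator arguments and class name below are assumptions, not part of the original):

import pytest

class TestConfig:  # hypothetical test class name
    @pytest.fixture(autouse=True)  # assumed decoration; not shown in the excerpt
    def config(self):
        ...  # body as in the excerpt above: build PbenchConfig, yield, then reset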
Example #4
def test_pbench_logger():

    config = PbenchConfig(cfg_name)
    logger = get_pbench_logger(_NAME_, config)

    logger_type = config.get("logging", "logger_type")

    logger = mock_the_handler(logger, logger_type, log_files[logger_type])
    logger.debug(log_msgs[logger_type])
    
    log_file = os.path.join(logdir, log_files[logger_type])
    if os.path.isfile(log_file):
        with open(log_file, "r") as f:
            assert (
                f.read()[:-1] == log_msgs[logger_type]
            ), "Mismatch: the file did not contain the expected message."
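Here mock_the_handler, log_files, log_msgs, logdir, and cfg_name are helpers defined elsewhere in the test module. A hedged sketch of what mock_the_handler presumably does, pointing the wrapped stdlib logger at a plain file so the test can read back what was logged (the real helper in the pbench test suite may differ):

import logging
import os

logdir = "/tmp/pbench-test-logs"  # assumed module-level scratch directory
os.makedirs(logdir, exist_ok=True)

def mock_the_handler(logger, logger_type, filename):
    # Assumed behavior: swap the handlers on the underlying stdlib logger for a
    # FileHandler under logdir; logger_type is unused in this sketch.
    handler = logging.FileHandler(os.path.join(logdir, filename))
    handler.setFormatter(logging.Formatter("%(message)s"))
    inner = getattr(logger, "logger", logger)  # pbench loggers wrap a stdlib logger
    inner.handlers = [handler]
    inner.setLevel(logging.DEBUG)
    return logger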
Example #5
 def test_log_level(self):
     """Test to verify log level setting."""
     # Was test-26.5, test_logger_level.py
     config_prefix_path = Path("lib/pbench/test/unit/common/config/")
     config = PbenchConfig(config_prefix_path / "log-level.cfg")
     logger = get_pbench_logger("test_log_level", config)
     assert (
         config.logger_type == "devlog"
     ), f"Unexpected logger type encountered, '{config.logger_type}', expected 'devlog'"
     assert (
         logger.logger.getEffectiveLevel() == logging.INFO
     ), f"Unexpected default logging level, {logger.logger.getEffectiveLevel()}"
     logger = get_pbench_logger("other", config)
     assert (
         logger.logger.getEffectiveLevel() == logging.CRITICAL
     ), f"Unexpected logging level, {logger.logger.getEffectiveLevel()}"
Example #6
    # file to operate, and we know the relative location of that config file,
    # we check to see if that exists before declaring a problem.
    config_name = os.path.join(os.path.dirname(_dir), "lib", "config",
                               "pbench-server.cfg")
    if not os.path.exists(config_name):
        print(
            "{}: No config file specified: set _PBENCH_SERVER_CONFIG env variable or use"
            " --config <file> on the command line".format(_prog),
            file=sys.stderr,
        )
        sys.exit(1)
else:
    config_name = parsed.cfg_name

try:
    config = PbenchConfig(config_name)
except BadConfig as e:
    print("{}: {} (config file {})".format(_prog, e, config_name),
          file=sys.stderr)
    sys.exit(1)

# Exclude attributes that are not meant to be exported to the environment
vars = sorted([
    key for key in config.__dict__.keys()
    if key not in ("files", "conf", "timestamp", "_unittests", "get")
])
for att in vars:
    try:
        os.environ[att] = getattr(config, att)
    except AttributeError:
        print(
def main():
    cfg_name = os.environ.get("_PBENCH_SERVER_CONFIG")
    if not cfg_name:
        print(
            "{}: ERROR: No config file specified; set _PBENCH_SERVER_CONFIG env variable or"
            " use --config <file> on the command line".format(_NAME_),
            file=sys.stderr)
        return 2

    try:
        config = PbenchConfig(cfg_name)
    except BadConfig as e:
        print("{}: {}".format(_NAME_, e), file=sys.stderr)
        return 1

    logger = get_pbench_logger(_NAME_, config)

    archive = config.ARCHIVE
    if not os.path.isdir(archive):
        logger.error(
            "The setting for ARCHIVE in the config file is {}, but that is"
            " not a directory", archive)
        return 1

    # add a BACKUP field to the config object
    config.BACKUP = backup = config.conf.get("pbench-server",
                                             "pbench-backup-dir")
    if len(backup) == 0:
        logger.error(
            "Unspecified backup directory, no pbench-backup-dir config in"
            " pbench-server section")
        return 1
    if not os.path.isdir(backup):
        logger.error(
            "The setting for BACKUP in the config file is {}, but that is"
            " not a directory", backup)
        return 1

    # instantiate the s3config class
    s3_config_obj = S3Config(config, logger)
    s3_config_obj = sanity_check(s3_config_obj, logger)

    logger.info('start-{}', config.TS)
    start = config.timestamp()

    prog = os.path.basename(sys.argv[0])

    sts = 0
    # N.B. tmpdir is the pathname of the temp directory.
    with tempfile.TemporaryDirectory() as tmpdir:

        archive_obj = BackupObject("ARCHIVE", config.ARCHIVE, tmpdir, logger)
        local_backup_obj = BackupObject("BACKUP", config.BACKUP, tmpdir,
                                        logger)
        s3_backup_obj = BackupObject("S3", s3_config_obj, tmpdir, logger)

        with tempfile.NamedTemporaryFile(mode='w+t', dir=tmpdir) as reportfp:
            reportfp.write("{}.{} ({}) started at {}\n".format(
                prog, config.TS, config.PBENCH_ENV, start))
            if s3_config_obj is None:
                reportfp.write(
                    "\nNOTICE: S3 backup service is inaccessible; skipping"
                    " ARCHIVE to S3 comparison\n\n")

            # FIXME: Parallelize these three ...

            # Create entry list for archive
            logger.debug('Starting archive list creation')
            ar_start = config.timestamp()
            ret_sts = archive_obj.entry_list_creation()
            if ret_sts == Status.FAIL:
                sts += 1
            logger.debug('Finished archive list ({!r})', ret_sts)

            # Create entry list for backup
            logger.debug('Starting local backup list creation')
            lb_start = config.timestamp()
            ret_sts = local_backup_obj.entry_list_creation()
            if ret_sts == Status.FAIL:
                sts += 1
            logger.debug('Finished local backup list ({!r})', ret_sts)

            # Create entry list for S3
            if s3_config_obj is not None:
                logger.debug('Starting S3 list creation')
                s3_start = config.timestamp()
                ret_sts = s3_backup_obj.entry_list_creation()
                if ret_sts == Status.FAIL:
                    sts += 1
                logger.debug('Finished S3 list ({!r})', ret_sts)

            logger.debug('Checking MD5 signatures of archive')
            ar_md5_start = config.timestamp()
            try:
                # Check the data integrity in ARCHIVE (Question 1).
                md5_result_archive = archive_obj.checkmd5()
            except Exception as ex:
                msg = "Failed to check data integrity of ARCHIVE ({})".format(
                    config.ARCHIVE)
                logger.exception(msg)
                reportfp.write("\n{} - '{}'\n".format(msg, ex))
                sts += 1
            else:
                if md5_result_archive > 0:
                    # Create a report for failed MD5 results from ARCHIVE (Question 1)
                    archive_obj.report_failed_md5(reportfp)
                    sts += 1
            logger.debug('Finished checking MD5 signatures of archive')

            logger.debug('Checking MD5 signatures of local backup')
            lb_md5_start = config.timestamp()
            try:
                # Check the data integrity in BACKUP (Question 2).
                md5_result_backup = local_backup_obj.checkmd5()
            except Exception as ex:
                msg = "Failed to check data integrity of BACKUP ({})".format(
                    config.BACKUP)
                logger.exception(msg)
                reportfp.write("\n{} - '{}'\n".format(msg, ex))
            else:
                if md5_result_backup > 0:
                    # Create a report for failed MD5 results from BACKUP (Question 2)
                    local_backup_obj.report_failed_md5(reportfp)
                    sts += 1
            logger.debug('Finished checking MD5 signatures of local backup')

            # Compare ARCHIVE with BACKUP (Questions 3 and 3a).
            msg = "Comparing ARCHIVE with BACKUP"
            reportfp.write("\n{}\n{}\n".format(msg, "-" * len(msg)))
            compare_entry_lists(archive_obj, local_backup_obj, reportfp)

            if s3_config_obj is not None:
                # Compare ARCHIVE with S3 (Questions 4, 4a, and 4b).
                msg = "Comparing ARCHIVE with S3"
                reportfp.write("\n{}\n{}\n".format(msg, "-" * len(msg)))
                compare_entry_lists(archive_obj, s3_backup_obj, reportfp)

            if s3_config_obj is None:
                s3_start = "<skipped>"
            reportfp.write("\n\nPhases (started):\n"
                           "Archive List Creation:       {}\n"
                           "Local Backup List Creation:  {}\n"
                           "S3 List Creation:            {}\n"
                           "Archive MD5 Checks:          {}\n"
                           "Local Backup MD5 Checks:     {}\n".format(
                               ar_start, lb_start, s3_start, ar_md5_start,
                               lb_md5_start))

            end = config.timestamp()
            reportfp.write("\n{}.{} ({}) finished at {}\n".format(
                prog, config.TS, config.PBENCH_ENV, end))

            # Rewind to the beginning.
            reportfp.seek(0)

            report = Report(config, _NAME_)
            report.init_report_template()
            try:
                report.post_status(config.timestamp(), "status", reportfp.name)
            except Exception:
                pass

    logger.info('end-{}', config.TS)

    return sts
Example #8
def main(options):
    if not options.cfg_name:
        print(
            f"{_NAME_}: ERROR: No config file specified; set"
            " _PBENCH_SERVER_CONFIG env variable",
            file=sys.stderr,
        )
        return 1

    try:
        config = PbenchConfig(options.cfg_name)
    except BadConfig as e:
        print(f"{_NAME_}: {e}", file=sys.stderr)
        return 2

    try:
        archive_p = Path(config.ARCHIVE).resolve(strict=True)
    except FileNotFoundError:
        print(
            f"The configured ARCHIVE directory, {config.ARCHIVE}, does not exist",
            file=sys.stderr,
        )
        return 3

    if not archive_p.is_dir():
        print(
            f"The configured ARCHIVE directory, {config.ARCHIVE}, is not a valid directory",
            file=sys.stderr,
        )
        return 4

    try:
        incoming_p = Path(config.INCOMING).resolve(strict=True)
    except FileNotFoundError:
        print(
            f"The configured INCOMING directory, {config.INCOMING}, does not exist",
            file=sys.stderr,
        )
        return 5

    if not incoming_p.is_dir():
        print(
            f"The configured INCOMING directory, {config.INCOMING}, is not a valid directory",
            file=sys.stderr,
        )
        return 6

    _fmt = "%Y-%m-%d"
    try:
        oldest_dt = datetime.strptime(options.oldest, _fmt)
        newest_dt = datetime.strptime(options.newest, _fmt)
    except Exception as exc:
        print(
            f"Invalid time range, {options.oldest} to {options.newest}, "
            f"'{exc}', expected time range values in the form YYYY-MM-DD",
            file=sys.stderr,
        )
        return 7
    else:
        if newest_dt < oldest_dt:
            # For convenience, swap oldest and newest dates that are reversed.
            oldest_dt, newest_dt = newest_dt, oldest_dt

    print(f"Re-indexing tar balls in the range {oldest_dt} to {newest_dt}")

    actions = []
    start = pbench._time()
    for _val in gen_reindex_list(archive_p, oldest_dt, newest_dt):
        controller_name, tb_name = _val
        act_set = reindex(controller_name, tb_name, archive_p, incoming_p,
                          options.dry_run)
        actions.append(act_set)
    end = pbench._time()

    for act_set in sorted(actions):
        print(f"{act_set!r}")

    print(f"Run-time: {start} {end} {end - start}")
    return 0
    help="The caller's user ID (optional)",
)
parser.add_argument(
    "-T",
    "--type",
    dest="doctype",
    required=True,
    help="The type of report document to index, one of status|error",
)
parser.add_argument("file_to_index",
                    nargs=1,
                    help="The file containing the report to index")
parsed = parser.parse_args()

try:
    config = PbenchConfig(parsed.cfg_name)
except BadConfig as e:
    print("{}: {}".format(_prog, e), file=sys.stderr)
    sys.exit(1)

hostname = gethostname()
pid = parsed.pid
group_id = parsed.group_id
user_id = parsed.user_id

report = Report(config,
                parsed.name,
                pid=pid,
                group_id=group_id,
                user_id=user_id,
                hostname=hostname)
def main():
    cfg_name = os.environ.get("CONFIG")
    if not cfg_name:
        print("{}: ERROR: No config file specified; set CONFIG env variable or"
              " use --config <file> on the command line".format(_NAME_),
              file=sys.stderr)
        return 2

    try:
        config = PbenchConfig(cfg_name)
    except BadConfig as e:
        print("{}: {}".format(_NAME_, e), file=sys.stderr)
        return 1

    logger = get_pbench_logger(_NAME_, config)

    archive = config.ARCHIVE
    if not os.path.isdir(archive):
        logger.error(
            "The setting for ARCHIVE in the config file is {}, but that is not a directory",
            archive)
        return 1

    # add a BACKUP field to the config object
    config.BACKUP = backup = config.conf.get("pbench-server",
                                             "pbench-backup-dir")
    if len(backup) == 0:
        logger.error(
            "Unspecified backup directory, no pbench-backup-dir config in pbench-server section"
        )
        return 1

    if not os.path.isdir(backup):
        logger.error(
            "The setting for BACKUP in the config file is {}, but that is not a directory",
            backup)
        return 1

    # instantiate the s3config class
    s3_config_obj = S3Config(config, logger)
    s3_config_obj = sanity_check(s3_config_obj, logger)

    logger.info('start-{}', config.TS)

    prog = os.path.basename(sys.argv[0])

    sts = 0
    # N.B. tmpdir is the pathname of the temp directory.
    with tempfile.TemporaryDirectory() as tmpdir:

        archive_obj = BackupObject("ARCHIVE", config.ARCHIVE)
        local_backup_obj = BackupObject("BACKUP", config.BACKUP)
        s3_backup_obj = BackupObject("S3", s3_config_obj)

        # Create entry list for archive
        archive_entry_list = entry_list_creation(archive_obj, config.ARCHIVE,
                                                 logger)
        if archive_entry_list == Status.FAIL:
            sts += 1

        # Create entry list for backup
        backup_entry_list = entry_list_creation(local_backup_obj,
                                                config.BACKUP, logger)
        if backup_entry_list == Status.FAIL:
            sts += 1

        # Create entry list for S3
        s3_entry_list = entry_list_creation_s3(s3_config_obj, logger)
        if s3_entry_list == Status.FAIL:
            sts += 1

        with tempfile.NamedTemporaryFile(mode='w+t', dir=tmpdir) as reportfp:
            reportfp.write("{}.{}({})\n".format(prog, config.TS,
                                                config.PBENCH_ENV))

            try:
                # Check the data integrity in ARCHIVE (Question 1).
                md5_result_archive = checkmd5(config.ARCHIVE, tmpdir,
                                              archive_obj, logger)
            except Exception:
                msg = "Failed to check data integrity of ARCHIVE ({})".format(
                    config.ARCHIVE)
                logger.exception(msg)
                reportfp.write("{}\n".format(msg))
                sts += 1
            else:
                if md5_result_archive > 0:
                    # Create a report for failed MD5 results from ARCHIVE (Question 1)
                    report_failed_md5(archive_obj, tmpdir, reportfp, logger)
                    sts += 1

            try:
                # Check the data integrity in BACKUP (Question 2).
                md5_result_backup = checkmd5(config.BACKUP, tmpdir,
                                             local_backup_obj, logger)
            except Exception:
                msg = "Failed to check data integrity of BACKUP ({})".format(
                    config.BACKUP)
                logger.exception(msg)
                reportfp.write("{}\n".format(msg))
            else:
                if md5_result_backup > 0:
                    # Create a report for failed MD5 results from BACKUP (Question 2)
                    report_failed_md5(local_backup_obj, tmpdir, reportfp,
                                      logger)
                    sts += 1

            # Compare ARCHIVE with BACKUP (Questions 3 and 3a).
            compare_entry_lists(archive_obj, local_backup_obj,
                                archive_entry_list, backup_entry_list,
                                reportfp)

            if s3_config_obj is None:
                reportfp.write('S3 backup service is inaccessible.\n')
            else:
                # Compare ARCHIVE with S3 (Questions 4, 4a, and 4b).
                compare_entry_lists(archive_obj, s3_backup_obj,
                                    archive_entry_list, s3_entry_list,
                                    reportfp)

            # Rewind to the beginning.
            reportfp.seek(0)

            report = Report(config, _NAME_)
            report.init_report_template()
            try:
                report.post_status(config.timestamp(), "status", reportfp.name)
            except Exception:
                pass

    logger.info('end-{}', config.TS)

    return sts
Example #11
def main():
    cfg_name = os.environ.get("_PBENCH_SERVER_CONFIG")

    if not cfg_name:
        print(
            "{}: ERROR: No config file specified; set _PBENCH_SERVER_CONFIG env variable or"
            " use --config <file> on the command line".format(_NAME_),
            file=sys.stderr)
        return 2

    try:
        config = PbenchConfig(cfg_name)
    except BadConfig as e:
        print("{}: {}".format(_NAME_, e), file=sys.stderr)
        return 1

    logger = get_pbench_logger(_NAME_, config)

    # Add a BACKUP and QDIR field to the config object
    config.BACKUP = config.conf.get("pbench-server", "pbench-backup-dir")
    config.QDIR = config.get('pbench-server', 'pbench-quarantine-dir')

    # call the LocalBackupObject class
    lb_obj = LocalBackupObject(config)

    # call the S3Config class
    s3_obj = S3Config(config, logger)

    lb_obj, s3_obj = sanity_check(lb_obj, s3_obj, config, logger)

    if lb_obj is None and s3_obj is None:
        return 3

    logger.info('start-{}'.format(config.TS))

    # Initiate the backup
    counts = backup_data(lb_obj, s3_obj, config, logger)

    result_string = ("Total processed: {},"
                     " Local backup successes: {},"
                     " Local backup failures: {},"
                     " S3 upload successes: {},"
                     " S3 upload failures: {},"
                     " Quarantined: {}".format(counts.ntotal,
                                               counts.nbackup_success,
                                               counts.nbackup_fail,
                                               counts.ns3_success,
                                               counts.ns3_fail,
                                               counts.nquaran))

    logger.info(result_string)

    prog = os.path.basename(sys.argv[0])

    # prepare and send report
    with tempfile.NamedTemporaryFile(mode='w+t', dir=config.TMP) as reportfp:
        reportfp.write("{}.{}({})\n{}\n".format(prog, config.timestamp(),
                                                config.PBENCH_ENV,
                                                result_string))
        reportfp.seek(0)

        report = Report(config, _NAME_)
        report.init_report_template()
        try:
            report.post_status(config.timestamp(), "status", reportfp.name)
        except Exception:
            pass

    logger.info('end-{}'.format(config.TS))

    return 0
def main(options):
    if not options.tb_path:
        print(
            f"{_NAME_}: ERROR: No tar ball path specified",
            file=sys.stderr,
        )
        return 2
    tb_path = os.path.realpath(options.tb_path)
    tb_name = os.path.basename(tb_path)

    if not options.cfg_name:
        print(
            f"{_NAME_}: ERROR: No config file specified; set"
            " _PBENCH_SERVER_CONFIG env variable",
            file=sys.stderr,
        )
        return 3

    try:
        config = PbenchConfig(options.cfg_name)
    except BadConfig as e:
        print(f"{_NAME_}: {e}", file=sys.stderr)
        return 4

    archive = config.ARCHIVE
    archive_p = os.path.realpath(archive)

    if not os.path.exists(archive_p):
        print(
            f"The configured ARCHIVE directory, {archive}, does not exist",
            file=sys.stderr,
        )
        return 5

    if not os.path.isdir(archive_p):
        print(
            f"The configured ARCHIVE directory, {archive},"
            " is not a valid directory",
            file=sys.stderr,
        )
        return 6

    incoming = config.INCOMING
    incoming_p = os.path.realpath(incoming)

    if not os.path.exists(incoming_p):
        print(
            f"The configured INCOMING directory, {incoming}, does not exist",
            file=sys.stderr,
        )
        return 7

    if not os.path.isdir(incoming_p):
        print(
            f"The configured INCOMING directory, {incoming},"
            " is not a valid directory",
            file=sys.stderr,
        )
        return 8

    # Fetch the configured maximum number of days a tar can remain "unpacked"
    # in the INCOMING tree.
    try:
        max_unpacked_age = config.conf.get("pbench-server", "max-unpacked-age")
    except NoOptionError as e:
        print(f"{e}", file=sys.stderr)
        return 9
    try:
        max_unpacked_age = int(max_unpacked_age)
    except Exception:
        print(f"Bad maximum unpacked age, {max_unpacked_age}", file=sys.stderr)
        return 10

    # Check the unpacked directory name pattern.
    match = tb_pat.fullmatch(tb_name)
    if not match:
        print(f"Unrecognized tar ball name format, {tb_name}", file=sys.stderr)
        return 11

    if not tb_path.startswith(archive_p):
        print(f"Given tar ball, {tb_path}, not from the ARCHIVE tree",
              file=sys.stderr)
        return 12

    if not os.path.exists(tb_path):
        print(
            f"Given tar ball, {tb_path}, does not seem to exist in the ARCHIVE tree",
            file=sys.stderr,
        )
        return 13

    # Determine the proper time to use as a reference.
    if config._ref_datetime is not None:
        try:
            curr_dt = config._ref_datetime
        except Exception:
            # Ignore bad dates from test environment.
            curr_dt = datetime.utcnow()
    else:
        curr_dt = datetime.utcnow()

    # Turn the pattern components of the match into a datetime object.
    tb_dt = datetime(
        int(match.group(1)),
        int(match.group(2)),
        int(match.group(3)),
        int(match.group(4)),
        int(match.group(5)),
        int(match.group(6)),
    )

    # See if this unpacked tar ball directory has "aged" out.
    timediff = curr_dt - tb_dt
    if timediff.days > max_unpacked_age:
        # Finally, make one last check to see if this tar ball
        # directory should be kept regardless of aging out.
        controller_p = os.path.basename(os.path.dirname(tb_path))
        if os.path.isfile(
                os.path.join(incoming_p, controller_p, tb_name,
                             ".__pbench_keep__")):
            ret_val = 0
        else:
            ret_val = 1
    else:
        ret_val = 0

    return ret_val
Example #13
 def test_logger_type_hostport_missing(self):
     with pytest.raises(BadConfig):
         PbenchConfig(_config_path_prefix / "hostport-missing.cfg")
     with pytest.raises(BadConfig):
         PbenchConfig(_config_path_prefix / "hostport-missing-port.cfg")
Example #14
 def test_log_dir_provided(self):
     config = PbenchConfig(_config_path_prefix / "logdir.cfg")
     assert (
         config.log_dir == "/srv/log/directory"
     ), f"Unexpected log directory, {config.log_dir!r}"