def test_add_syslog_handler():
    logger = logging.getLogger("test_add_syslog_handler1")
    add_syslog_handler(logger, ("localhost", 0), "app1")
    assert isinstance(logger.handlers[0], SplitSysLogHandler)
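The test only checks that the helper attaches a SplitSysLogHandler to the logger. Neither add_syslog_handler nor SplitSysLogHandler is defined on this page, so the following is a rough sketch of what such a helper might look like, assuming SplitSysLogHandler is a logging.handlers.SysLogHandler subclass that forwards each line of a multi-line message as its own record; the demo logger name and the port 514 are made up for illustration.

import logging
import logging.handlers


class SplitSysLogHandler(logging.handlers.SysLogHandler):
    # Hypothetical stand-in for the project's class: forward every line of a
    # multi-line message as a separate syslog record.
    def emit(self, record):
        for line in record.getMessage().splitlines() or [""]:
            single = logging.makeLogRecord(record.__dict__)
            single.msg = line
            single.args = None
            logging.handlers.SysLogHandler.emit(self, single)


def add_syslog_handler(logger, address, app):
    # Attach a syslog handler that prefixes every message with the app name.
    handler = SplitSysLogHandler(address=address)
    handler.setFormatter(logging.Formatter(app + ": %(message)s"))
    logger.addHandler(handler)


demo = logging.getLogger("syslog_demo")
add_syslog_handler(demo, ("localhost", 514), "app1")
demo.warning("first line\nsecond line")  # emitted as two syslog records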
Example #2
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-r",
                        "--region",
                        action="append",
                        dest="regions",
                        required=True)
    parser.add_argument("-k",
                        "--secrets",
                        type=argparse.FileType('r'),
                        required=True)
    parser.add_argument("-c",
                        "--config",
                        type=argparse.FileType('r'),
                        required=True)
    parser.add_argument("-v",
                        "--verbose",
                        action="store_const",
                        dest="loglevel",
                        const=logging.DEBUG,
                        default=logging.INFO)
    parser.add_argument("-n",
                        "--dryrun",
                        dest="dryrun",
                        action="store_true",
                        help="don't actually do anything")
    parser.add_argument("-l",
                        "--logfile",
                        dest="logfile",
                        help="log file for full debug log")
    parser.add_argument("--latest-ami-percentage",
                        type=int,
                        default=100,
                        help="percentage instances which will be launched with"
                        " the latest ami available, remaining requests will be"
                        " made using the previous (default: 100)")

    args = parser.parse_args()

    logging.getLogger().setLevel(logging.DEBUG)
    logging.getLogger("boto").setLevel(logging.INFO)
    logging.getLogger("requests").setLevel(logging.WARN)
    logging.getLogger("iso8601").setLevel(logging.INFO)

    formatter = logging.Formatter("%(asctime)s - %(message)s")
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    handler.setLevel(args.loglevel)
    logging.getLogger().addHandler(handler)
    if args.logfile:
        fhandler = logging.handlers.RotatingFileHandler(args.logfile,
                                                        maxBytes=10 *
                                                        (1024**2),
                                                        backupCount=100)
        fhandler.setLevel(logging.DEBUG)
        fhandler.setFormatter(formatter)
        logging.getLogger().addHandler(fhandler)

    config = json.load(args.config)
    secrets = json.load(args.secrets)

    aws_watch_pending(
        dburl=secrets['db'],
        regions=args.regions,
        builder_map=config['buildermap'],
        region_priorities=config['region_priorities'],
        dryrun=args.dryrun,
        spot_config=config.get("spot"),
        ondemand_config=config.get("ondemand"),
        latest_ami_percentage=args.latest_ami_percentage,
    )

    if all([
            config.get("graphite_host"),
            config.get("graphite_port"),
            config.get("graphite_prefix")
    ]):
        gr_log.add_destination(host=config["graphite_host"],
                               port=config["graphite_port"],
                               prefix=config["graphite_prefix"])

    for entry in secrets.get("graphite_hosts", []):
        host = entry.get("host")
        port = entry.get("port")
        prefix = "{}.releng.aws.aws_watch_pending".format(entry.get("prefix"))
        if all([host, port, prefix]):
            gr_log.add_destination(host, port, prefix)
    if secrets.get("syslog_address"):
        add_syslog_handler(log,
                           address=secrets["syslog_address"],
                           app="aws_watch_pending")

    gr_log.sendall()
    log.debug("done")
Example #3
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-r", "--region", action="append", dest="regions",
                        required=True)
    parser.add_argument("-k", "--secrets", type=argparse.FileType('r'),
                        required=True)
    parser.add_argument("-c", "--config", type=argparse.FileType('r'),
                        required=True)
    parser.add_argument("-v", "--verbose", action="store_const",
                        dest="loglevel", const=logging.DEBUG,
                        default=logging.INFO)
    parser.add_argument("-n", "--dryrun", dest="dryrun", action="store_true",
                        help="don't actually do anything")
    parser.add_argument("-l", "--logfile", dest="logfile",
                        help="log file for full debug log")
    parser.add_argument("--latest-ami-percentage", type=int, default=100,
                        help="percentage instances which will be launched with"
                        " the latest ami available, remaining requests will be"
                        " made using the previous (default: 100)")

    args = parser.parse_args()

    logging.getLogger().setLevel(logging.DEBUG)
    logging.getLogger("boto").setLevel(logging.INFO)
    logging.getLogger("requests").setLevel(logging.WARN)
    logging.getLogger("iso8601").setLevel(logging.INFO)

    formatter = logging.Formatter("%(asctime)s - %(message)s")
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    handler.setLevel(args.loglevel)
    logging.getLogger().addHandler(handler)
    if args.logfile:
        fhandler = logging.handlers.RotatingFileHandler(
            args.logfile, maxBytes=10 * (1024 ** 2), backupCount=100)
        fhandler.setLevel(logging.DEBUG)
        fhandler.setFormatter(formatter)
        logging.getLogger().addHandler(fhandler)

    config = json.load(args.config)
    secrets = json.load(args.secrets)

    aws_watch_pending(
        dburl=secrets['db'],
        regions=args.regions,
        builder_map=config['buildermap'],
        region_priorities=config['region_priorities'],
        dryrun=args.dryrun,
        spot_config=config.get("spot"),
        ondemand_config=config.get("ondemand"),
        latest_ami_percentage=args.latest_ami_percentage,
    )

    if all([config.get("graphite_host"), config.get("graphite_port"),
            config.get("graphite_prefix")]):
        gr_log.add_destination(
            host=config["graphite_host"], port=config["graphite_port"],
            prefix=config["graphite_prefix"])

    for entry in secrets.get("graphite_hosts", []):
        host = entry.get("host")
        port = entry.get("port")
        prefix = "{}.releng.aws.aws_watch_pending".format(entry.get("prefix"))
        if all([host, port, prefix]):
            gr_log.add_destination(host, port, prefix)
    if secrets.get("syslog_address"):
        add_syslog_handler(log, address=secrets["syslog_address"],
                           app="aws_watch_pending")

    gr_log.sendall()
    log.debug("done")
Example #4
def test_add_syslog_handler():
    logger = logging.getLogger("test_add_syslog_handler1")
    add_syslog_handler(logger, ("localhost", 0), "app1")
    assert isinstance(logger.handlers[0], SplitSysLogHandler)
Example #5
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-r", "--region", action="append", dest="regions",
                        required=True)
    parser.add_argument("-v", "--verbose", action="store_const",
                        dest="loglevel", const=logging.DEBUG,
                        default=logging.WARNING)
    parser.add_argument("-k", "--secrets", type=argparse.FileType('r'),
                        required=True)
    parser.add_argument("-u", "--user", required=True, help="SSH user name")
    parser.add_argument("--ssh-key", required=True,
                        help="Private SSH key path")
    parser.add_argument("-t", "--moz-type", action="append", dest="moz_types",
                        required=True,
                        help="moz-type tag values to be checked")
    parser.add_argument("-j", "--concurrency", type=int, default=8)
    parser.add_argument(
        "--masters-json",
        default="https://hg.mozilla.org/build/tools/raw-file/default/buildfarm"
        "/maintenance/production-masters.json")
    parser.add_argument("--dry-run", action="store_true")
    parser.add_argument("-l", "--logfile", dest="logfile",
                        help="log file for full debug log")

    args = parser.parse_args()

    logging.getLogger().setLevel(logging.DEBUG)
    logging.getLogger("boto").setLevel(logging.WARN)
    logging.getLogger("paramiko").setLevel(logging.WARN)
    logging.getLogger('requests').setLevel(logging.WARN)

    formatter = logging.Formatter("%(asctime)s - %(levelname)s -  %(message)s")
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    handler.setLevel(args.loglevel)
    logging.getLogger().addHandler(handler)

    if args.logfile:
        handler = logging.handlers.RotatingFileHandler(
            args.logfile, maxBytes=10 * (1024 ** 2), backupCount=100)
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(formatter)
        logging.getLogger().addHandler(handler)

    log.debug("starting")

    masters_json = requests.get(args.masters_json).json()
    secrets = json.load(args.secrets)

    aws_stop_idle(user=args.user, key_filename=args.ssh_key,
                  regions=args.regions, masters_json=masters_json,
                  moz_types=args.moz_types, dryrun=args.dry_run,
                  concurrency=args.concurrency)
    for entry in secrets.get("graphite_hosts", []):
        host = entry.get("host")
        port = entry.get("port")
        prefix = "{}.releng.aws.aws_stop_idle".format(entry.get("prefix"))
        if all([host, port, prefix]):
            gr_log.add_destination(host, port, prefix)

    if secrets.get("syslog_address"):
        add_syslog_handler(log, address=secrets["syslog_address"],
                           app="aws_stop_idle")

    gr_log.sendall()
    log.debug("done")
Example #6
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-r",
                        "--region",
                        action="append",
                        dest="regions",
                        required=True)
    parser.add_argument("-v",
                        "--verbose",
                        action="store_const",
                        dest="loglevel",
                        const=logging.DEBUG,
                        default=logging.WARNING)
    parser.add_argument("-k",
                        "--secrets",
                        type=argparse.FileType('r'),
                        required=True)
    parser.add_argument("-u", "--user", required=True, help="SSH user name")
    parser.add_argument("--ssh-key",
                        required=True,
                        help="Private SSH key path")
    parser.add_argument("-t",
                        "--moz-type",
                        action="append",
                        dest="moz_types",
                        required=True,
                        help="moz-type tag values to be checked")
    parser.add_argument("-j", "--concurrency", type=int, default=8)
    parser.add_argument(
        "--masters-json",
        default="https://hg.mozilla.org/build/tools/raw-file/default/buildfarm"
        "/maintenance/production-masters.json")
    parser.add_argument("--dry-run", action="store_true")
    parser.add_argument("-l",
                        "--logfile",
                        dest="logfile",
                        help="log file for full debug log")

    args = parser.parse_args()

    logging.getLogger().setLevel(logging.DEBUG)
    logging.getLogger("boto").setLevel(logging.WARN)
    logging.getLogger("paramiko").setLevel(logging.WARN)
    logging.getLogger('requests').setLevel(logging.WARN)

    formatter = logging.Formatter("%(asctime)s - %(levelname)s -  %(message)s")
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    handler.setLevel(args.loglevel)
    logging.getLogger().addHandler(handler)

    if args.logfile:
        handler = logging.handlers.RotatingFileHandler(args.logfile,
                                                       maxBytes=10 * (1024**2),
                                                       backupCount=100)
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(formatter)
        logging.getLogger().addHandler(handler)

    log.debug("starting")

    masters_json = requests.get(args.masters_json).json()
    secrets = json.load(args.secrets)

    aws_stop_idle(user=args.user,
                  key_filename=args.ssh_key,
                  regions=args.regions,
                  masters_json=masters_json,
                  moz_types=args.moz_types,
                  dryrun=args.dry_run,
                  concurrency=args.concurrency)
    for entry in secrets.get("graphite_hosts", []):
        host = entry.get("host")
        port = entry.get("port")
        prefix = "{}.releng.aws.aws_stop_idle".format(entry.get("prefix"))
        if all([host, port, prefix]):
            gr_log.add_destination(host, port, prefix)

    if secrets.get("syslog_address"):
        add_syslog_handler(log,
                           address=secrets["syslog_address"],
                           app="aws_stop_idle")

    gr_log.sendall()
    log.debug("done")