Example #1
def get_logger(name=None, filename='access', clear_handlers=False, userid_format=False):
    key = "{}{}".format(name, filename)
    if not clear_handlers and key in _loggers:
        logger = _loggers[key]
        assert isinstance(logger, LoggerAdapter)
        return logger

    filepath = os.path.join(curdir, 'logfiles', filename)

    if userid_format:
        formatter = '%(asctime)s|%(process)d|%(filename)s|nm:%(lineno)d|%(levelname)s|%(userid)s|%(message)s'
    else:
        formatter = '%(asctime)s|%(process)d|%(filename)s|nm:%(lineno)d|%(levelname)s|%(message)s'
    formatter_color = '%(log_color)s' + formatter

    logger = colorlog.getLogger(name)
    # Prevent messages from propagating to ancestor loggers, e.g. the root logger;
    # if an ancestor logger is also configured, every message would be emitted twice
    logger.propagate = False
    if clear_handlers:
        logger.handlers.clear()

    # Console output

    if colorlog:
        handler = colorlog.StreamHandler()
        fmt = colorlog.ColoredFormatter(formatter_color,
                                        datefmt=None,
                                        reset=True,
                                        log_colors={
                                            'DEBUG': 'cyan',
                                            'INFO': 'green',
                                            'WARNING': 'yellow',
                                            'ERROR': 'red',
                                            'CRITICAL': 'red,bg_white',
                                        },
                                        secondary_log_colors={
                                            # 'message': {
                                            #     'ERROR': 'yellow',
                                            #     'CRITICAL': 'yellow'}
                                        })
    else:
        fmt = Formatter(formatter)
        handler = StreamHandler()
    handler.setFormatter(fmt)
    logger.addHandler(handler)

    if filename:
        # Log files
        # fileSizeHandler = RotatingFileHandler(filepath, maxBytes=1024, backupCount=1)
        # fileSizeHandler.setFormatter(formatter)
        # logger.addHandler(fileSizeHandler)

        # Daily-rotated log file, DEBUG level (development use)
        fileTimeHandler = TimedRotatingFileHandler(filepath + '.debug.log', "D", 1, 10)
        fileTimeHandler.suffix = "%Y%m%d.log"  # Time format for rotated file names; the rotated name defaults to filename + "." + suffix, and changing that naming pattern itself requires patching the logging source
        fileTimeHandler.setFormatter(Formatter(formatter))
        fileTimeHandler.setLevel(logging.DEBUG)
        non_error_filter = logging.Filter()
        non_error_filter.filter = lambda record: record.levelno < logging.INFO
        fileTimeHandler.addFilter(non_error_filter)
        logger.addHandler(fileTimeHandler)


        # Daily-rotated log file, INFO level (production use)
        fileTimeHandler = TimedRotatingFileHandler(filepath + '.info.log', "D", 1, 10)
        fileTimeHandler.suffix = "%Y%m%d.log"  # Time format for rotated file names; see the note on the DEBUG handler above
        fileTimeHandler.setFormatter(Formatter(formatter))
        fileTimeHandler.setLevel(logging.INFO)
        non_error_filter = logging.Filter()
        non_error_filter.filter = lambda record: record.levelno < logging.WARNING
        fileTimeHandler.addFilter(non_error_filter)
        logger.addHandler(fileTimeHandler)

        # Daily-rotated log file, ERROR level (production use)
        fileTimeHandler = TimedRotatingFileHandler(filepath + '.error.log', "D", 1, 10)
        fileTimeHandler.suffix = "%Y%m%d.log"  # Time format for rotated file names; see the note on the DEBUG handler above
        fileTimeHandler.setFormatter(Formatter(formatter))
        fileTimeHandler.setLevel(logging.ERROR)
        logger.addHandler(fileTimeHandler)

        logger.setLevel(logging.DEBUG)

    # Each user can hold its own LoggerAdapter to attach extra context, e.g.:
    # new_logger = LoggerAdapter(logger.logger, {'userid': 'liqe'})
    _loggers[key] = logger = LoggerAdapter(logger, {'userid': 'default'})
    assert isinstance(logger, LoggerAdapter)
    return logger
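
A minimal, self-contained sketch of the same pattern (colour console handler, a daily-rotated file handler restricted to one level band, and a LoggerAdapter that fills the %(userid)s field). The 'demo' logger name and file name are illustrative assumptions, not part of the snippet above:

import logging
from logging import LoggerAdapter
from logging.handlers import TimedRotatingFileHandler

import colorlog

fmt = '%(asctime)s|%(levelname)s|%(userid)s|%(message)s'

demo_logger = colorlog.getLogger('demo')
demo_logger.propagate = False        # keep messages away from the root logger
demo_logger.setLevel(logging.DEBUG)

console = colorlog.StreamHandler()
console.setFormatter(colorlog.ColoredFormatter('%(log_color)s' + fmt))
demo_logger.addHandler(console)

# File that receives INFO records only: handler level INFO plus a filter dropping WARNING and above
info_file = TimedRotatingFileHandler('demo.info.log', when='D', interval=1, backupCount=10)
info_file.setLevel(logging.INFO)
info_file.addFilter(lambda record: record.levelno < logging.WARNING)
info_file.setFormatter(logging.Formatter(fmt))
demo_logger.addHandler(info_file)

log = LoggerAdapter(demo_logger, {'userid': 'default'})
log.info('service started')          # coloured on the console, plain text in demo.info.log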
Example #2
                        help='offset (earliest or latest)')
    PARSER.add_argument('--verbose',
                        dest='VERBOSE',
                        action='store_true',
                        default=False,
                        help='Flag, chatty')
    PARSER.add_argument('--debug',
                        dest='DEBUG',
                        action='store_true',
                        default=False,
                        help='Flag, Very chatty')
    ARG = PARSER.parse_args()
    LOGGER = colorlog.getLogger()
    if ARG.DEBUG:
        LOGGER.setLevel(colorlog.colorlog.logging.DEBUG)
    elif ARG.VERBOSE:
        LOGGER.setLevel(colorlog.colorlog.logging.INFO)
    else:
        LOGGER.setLevel(colorlog.colorlog.logging.WARNING)
    HANDLER = colorlog.StreamHandler()
    HANDLER.setFormatter(colorlog.ColoredFormatter())
    LOGGER.addHandler(HANDLER)
    EMPTY_FILE = 'topic_empty.txt'
    EMPTY = open(EMPTY_FILE, 'w')
    ERROR_FILE = 'topic_error.txt'
    ERROR = open(ERROR_FILE, 'w')
    OUTPUT_FILE = 'topic_aging.txt'
    OUTPUT = open(OUTPUT_FILE, 'w')
    read_messages()
    sys.exit(0)
Example #3
import logging
import os
import sys

try:
    import cPickle as pickle
except ImportError:  # Python 3.x
    import pickle

import colorlog
import numpy as np
from PIL import Image

logger = logging.getLogger()
logger.setLevel(logging.INFO)

handler = colorlog.StreamHandler()
handler.setFormatter(colorlog.ColoredFormatter())
logger.addHandler(handler)

# logger.debug("Debug message")
# logger.info("Information message")
# logger.warning("Warning message")
# logger.error("Error message")
# logger.critical("Critical message")

np.set_printoptions(threshold=sys.maxsize)
np.set_printoptions(linewidth=10000)

script_dir = os.path.dirname(__file__)
training_data_dir = os.path.join(script_dir, "histogram_training_images",
                                 "sfa", "SKIN", "5")
Example #4
import logging
import sys
import colorlog
import textwrap


class CustomFormatter(colorlog.TTYColoredFormatter):
    """ Colored formatter that removes indentation caused by triple quotes """
    def format(self, record):
        if isinstance(record.msg, str):
            record.msg = textwrap.dedent(record.msg)
        return super().format(record)


# Configure Logger
LOGGER = colorlog.getLogger()
HANDLER = colorlog.StreamHandler(sys.stdout)
HANDLER.setFormatter(
    CustomFormatter('%(log_color)s[%(levelname)s]: %(message)s'))
HANDLER.addFilter(lambda record: record.levelno != logging.ERROR)

ERROR_HANDLER = colorlog.StreamHandler(sys.stderr)
ERROR_HANDLER.setFormatter(
    CustomFormatter('%(log_color)s[%(levelname)s]: %(message)s'))
ERROR_HANDLER.addFilter(lambda record: record.levelno == logging.ERROR)

LOGGER.addHandler(HANDLER)
LOGGER.addHandler(ERROR_HANDLER)

# Exports to proxy our global Logger
error = LOGGER.error
debug = LOGGER.debug
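
A short usage sketch for the split-stream setup above; it assumes the snippet has already run, and the call to setLevel is an added assumption (the original never lowers LOGGER below the default WARNING):

LOGGER.setLevel(logging.DEBUG)   # assumption: needed so debug() output is not filtered out

debug("""
    This indented, triple-quoted message
    is dedented by CustomFormatter before being written to stdout.
""")
error("This message is routed to stderr by ERROR_HANDLER.")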
Example #5
        pass

    def warning(self, msg, *args, **kwargs):
        pass

    def warn(self, msg, *args, **kwargs):
        pass

    def error(self, msg, *args, **kwargs):
        pass

    def exception(self, msg, *args, exc_info=True, **kwargs):
        pass

    def critical(self, msg, *args, **kwargs):
        pass

    def log(self, level, msg, *args, **kwargs):
        pass


_default_handler = colorlog.StreamHandler()
_default_handler.setFormatter(
    colorlog.ColoredFormatter(
        "%(log_color)s[%(asctime)s] <%(levelname)s>:%(name)s:%(message)s"))

default_logger = colorlog.getLogger("default_logger")
default_logger.addHandler(_default_handler)
default_logger.setLevel(INFO)
fake_logger = FakeLogger()
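
One way the pair above can be used (a sketch; the quiet flag is hypothetical and not part of the original): choose the real logger or the silent stand-in at run time.

quiet = False                      # hypothetical flag
log = fake_logger if quiet else default_logger
log.info("goes through default_logger; fake_logger would silently discard it")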
Example #6
def main(args: List[str]):
    try:
        import colorlog
    except ImportError:
        basicConfig(
            format="%(asctime)s [%(levelname)s] %(message)s",
            datefmt="%H:%M:%S",
            level=getenv('LOG_LEVEL', 'INFO'),
        )
        logger.warning('Please install colorlog: pip3 install colorlog')
    else:
        handler = colorlog.StreamHandler()
        formatter = colorlog.ColoredFormatter(
            "%(log_color)s%(asctime)s [%(levelname)s] %(message)s",
            datefmt="%H:%M:%S",
            log_colors={
                'DEBUG': 'cyan',
                'INFO': 'white',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red,bg_white',
            })
        handler.setFormatter(formatter)
        basicConfig(level=getenv('LOG_LEVEL', 'INFO'), handlers=[handler])

    parser = argparse.ArgumentParser(description='Testcase Generator')
    parser.add_argument('toml', nargs='*', help='Toml File')
    parser.add_argument('-p',
                        '--problem',
                        nargs='*',
                        help='Generate problem',
                        default=[])

    parser.add_argument('--dev', action='store_true', help='Developer Mode')
    parser.add_argument('--test', action='store_true', help='CI Mode')
    parser.add_argument('--htmldir', help='Generate HTML', default=None)

    parser.add_argument('--html',
                        action='store_true',
                        help='Deprecated: Generate HTML')
    parser.add_argument('--verify',
                        action='store_true',
                        help='Deprecated: Verify Inputs')
    parser.add_argument('--refhash',
                        action='store_true',
                        help='Deprecated: Refresh Hash')
    parser.add_argument('--ignore-cache',
                        action='store_true',
                        help='Deprecated: Ignore cache')
    parser.add_argument('--compile-checker',
                        action='store_true',
                        help='Deprecated: Compile Checker')
    parser.add_argument('--nogen',
                        action='store_true',
                        help='Deprecated: Skip Generate')
    parser.add_argument('--sol',
                        action='store_true',
                        help='Deprecated: Solution Test')

    opts = parser.parse_args(args)

    if opts.dev and opts.test:
        raise ValueError('only one of --dev and --test can be used')

    if opts.html:
        logger.warning('--html is deprecated. Please use --dev or --test')
    if opts.verify:
        logger.warning('--verify is deprecated. Please use --dev or --test')
    if opts.refhash:
        logger.warning('--refhash is deprecated. Please use --dev')
    if opts.ignore_cache:
        logger.warning('--ignore-cache is deprecated. Please use --dev')
    if opts.compile_checker:
        logger.warning(
            '--compile-checker is deprecated. Checker is compiled in default')
    if opts.nogen:
        logger.warning(
            '--nogen is deprecated, because auto skip was implemented')
    if opts.sol:
        logger.warning(
            '--sol is deprecated. --sol is also enabled by --verify')

    libdir = Path(__file__).parent
    problems = list()  # type: List[Problem]

    for tomlpath in opts.toml:
        tomlfile = toml.load(tomlpath)
        if 'problems' in tomlfile:
            logger.warning('problems.toml is deprecated')
            continue
        problems.append(Problem(libdir, Path(tomlpath).parent))

    for problem_name in opts.problem:
        tomls = list(libdir.glob('**/{}/info.toml'.format(problem_name)))
        if len(tomls) == 0:
            logger.error('Cannot find problem: {}'.format(problem_name))
            exit(1)
        if len(tomls) >= 2:
            logger.error('Find multi problem dirs: {}'.format(problem_name))
            exit(1)
        problem_dir = tomls[0].parent
        problems.append(Problem(libdir, problem_dir))
        logger.info('Find problem dir {}'.format(problem_dir))

    if len(problems) == 0:
        logger.warning('No problems')

    if opts.htmldir:
        logger.info('make htmldir')
        Path(opts.htmldir).mkdir(exist_ok=True, parents=True)

    # suppress the annoying dialog appears when an application crashes on Windows
    if platform.uname().system == 'Windows':
        import ctypes
        SEM_NOGPFAULTERRORBOX = 2  # https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863(v=vs.85).aspx
        ctypes.windll.kernel32.SetErrorMode(SEM_NOGPFAULTERRORBOX)

    # default
    force_generate = False
    ignore_warning = False
    rewrite_hash = False
    verify = False
    generate_html = False

    if opts.dev:
        force_generate = True
        ignore_warning = True
        rewrite_hash = True
        verify = True
        generate_html = True
    if opts.test:
        force_generate = True
        verify = True
        generate_html = True

    if opts.refhash:
        rewrite_hash = True
    if opts.ignore_cache:
        force_generate = True
    if opts.html:
        generate_html = True
    if opts.verify:
        verify = True

    for problem in problems:
        generate(problem,
                 force_generate=force_generate,
                 ignore_warning=ignore_warning,
                 rewrite_hash=rewrite_hash,
                 verify=verify,
                 generate_html=generate_html,
                 html_dir=Path(opts.htmldir) if opts.htmldir else None)
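
The bootstrap at the top of this example (use colorlog when it is installed, otherwise fall back to a plain basicConfig) also works on its own. A minimal, self-contained sketch of that pattern; the logger name is chosen here for illustration:

from logging import basicConfig, getLogger
from os import getenv

logger = getLogger('bootstrap-demo')

try:
    import colorlog
except ImportError:
    basicConfig(
        format="%(asctime)s [%(levelname)s] %(message)s",
        datefmt="%H:%M:%S",
        level=getenv('LOG_LEVEL', 'INFO'),
    )
    logger.warning('Please install colorlog: pip3 install colorlog')
else:
    handler = colorlog.StreamHandler()
    handler.setFormatter(colorlog.ColoredFormatter(
        "%(log_color)s%(asctime)s [%(levelname)s] %(message)s",
        datefmt="%H:%M:%S",
    ))
    basicConfig(level=getenv('LOG_LEVEL', 'INFO'), handlers=[handler])

logger.info('coloured output when colorlog is installed, plain output otherwise')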
Example #7
def main() -> None:
    r"""This is where it happens \o/"""

    parser = argparse.ArgumentParser()
    parser.add_argument("--version",
                        action="version",
                        version="%(prog)s {}".format(VERSION))
    output_group = parser.add_argument_group(title="Output controls")
    testing_group = parser.add_argument_group(title="Test and debug tools")
    output_group.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        dest="verbose",
        default=False,
        help="Alias for --log-level=info",
    )
    output_group.add_argument(
        "-q",
        "--quiet",
        action="store_true",
        dest="quiet",
        default=False,
        help="Alias for --log-level=critical",
    )
    testing_group.add_argument(
        "-t",
        "--test",
        action="store_true",
        dest="test",
        default=False,
        help="Test config and exit",
    )
    parser.add_argument("-p",
                        "--pidfile",
                        dest="pidfile",
                        default=None,
                        help="Write PID into this file")
    parser.add_argument(
        "-N",
        "--no-network",
        dest="no_network",
        default=False,
        action="store_true",
        help="Disable network listening socket (if enabled in config)",
    )
    output_group.add_argument(
        "-d",
        "--debug",
        dest="debug",
        default=False,
        action="store_true",
        help="Alias for --log-level=debug",
    )
    parser.add_argument(
        "-f",
        "--config",
        dest="config",
        default="monitor.ini",
        help=("configuration file (this is the main config; "
              "you also need monitors.ini (default filename)"),
    )
    parser.add_argument(
        "-j",
        "--threads",
        dest="threads",
        default=os.cpu_count(),  # default used by the library anyway
        type=int,
        help=
        (f"number of threads to run for checking monitors (default (cpus): {os.cpu_count()})"
         ),
    )
    output_group.add_argument(
        "-H",
        "--no-heartbeat",
        action="store_true",
        dest="no_heartbeat",
        default=False,
        help="Omit printing the '.' character when running checks",
    )
    testing_group.add_argument(
        "-1",
        "--one-shot",
        action="store_true",
        dest="one_shot",
        default=False,
        help=
        ("Run the monitors once only, without alerting. Require monitors without "
         '"fail" in the name to succeed. Exit zero or non-zero accordingly.'),
    )
    testing_group.add_argument(
        "--loops",
        dest="loops",
        default=-1,
        type=int,
        help="Number of iterations to run before exiting",
    )
    output_group.add_argument(
        "-l",
        "--log-level",
        dest="loglevel",
        default="warn",
        help="Log level: critical, error, warn, info, debug",
    )
    output_group.add_argument(
        "-C",
        "--no-colour",
        "--no-color",
        action="store_true",
        dest="no_colour",
        default=False,
        help="Do not colourise log output",
    )
    output_group.add_argument(
        "--no-timestamps",
        action="store_true",
        dest="no_timestamps",
        default=False,
        help="Do not prefix log output with timestamps",
    )
    testing_group.add_argument(
        "--dump-known-resources",
        action="store_true",
        dest="dump_resources",
        default=False,
        help="Print out loaded Monitor, Alerter and Logger types",
    )

    options = parser.parse_args()

    if options.dump_resources:
        import pprint
        import simplemonitor.Alerters.alerter as alerter
        import simplemonitor.Loggers.logger as logger
        import simplemonitor.Monitors.monitor as monitor

        print("Monitors:")
        pprint.pprint(sorted(monitor.all_types()), compact=True)
        print("Loggers:")
        pprint.pprint(sorted(logger.all_types()), compact=True)
        print("Alerters:")
        pprint.pprint(sorted(alerter.all_types()), compact=True)
        sys.exit(0)

    if options.quiet:
        options.loglevel = "critical"

    if options.verbose:
        options.loglevel = "info"

    if options.debug:
        options.loglevel = "debug"

    if options.no_timestamps:
        logging_timestamp = ""
    else:
        logging_timestamp = "%(asctime)s "

    try:
        log_level = getattr(logging, options.loglevel.upper())
    except AttributeError:
        print("Log level {0} is unknown".format(options.loglevel))
        sys.exit(1)

    log_datefmt = "%Y-%m-%d %H:%M:%S"
    log_plain_format = logging_timestamp + "%(levelname)8s (%(name)s) %(message)s"
    if not options.no_colour:
        try:
            handler = colorlog.StreamHandler()
            handler.setFormatter(
                colorlog.ColoredFormatter(
                    logging_timestamp +
                    "%(log_color)s%(levelname)8s%(reset)s (%(name)s) %(message)s",
                    datefmt="%Y-%m-%d %H:%M:%S",
                ))
            main_logger.addHandler(handler)
        except NameError:
            logging.basicConfig(format=log_plain_format, datefmt=log_datefmt)
            main_logger.error("Could not enable colorlog")
    else:
        logging.basicConfig(format=log_plain_format, datefmt=log_datefmt)

    main_logger.setLevel(log_level)

    if not options.quiet:
        main_logger.info("=== SimpleMonitor v%s", VERSION)
        main_logger.info("Loading main config from %s", options.config)

    m = SimpleMonitor(
        config_file=options.config,
        no_network=options.no_network,
        max_loops=options.loops,
        heartbeat=not options.no_heartbeat,
        one_shot=options.one_shot,
        max_workers=options.threads,
    )

    if options.test:
        main_logger.warning("Config test complete. Exiting.")
        sys.exit(0)

    if options.one_shot:
        main_logger.warning(
            "One-shot mode: expecting monitors without 'fail' in the name to succeed, "
            "and with to fail. Will exit zero or non-zero accordingly.")

    m.run()

    main_logger.info("Finished.")

    if options.one_shot:  # pragma: no cover
        ok = True
        print("\n--> One-shot results:")
        tail_info = []
        for this_monitor in sorted(m.monitors.keys()):
            if "fail" in this_monitor:
                if m.monitors[this_monitor].error_count == 0:
                    tail_info.append(
                        "    Monitor {0} should have failed".format(
                            this_monitor))
                    tail_info.append("        {}".format(
                        m.monitors[this_monitor].last_result))
                    ok = False
                else:
                    print(
                        "    Monitor {0} was ok (failed)".format(this_monitor))
            elif "skip" in this_monitor:
                if m.monitors[this_monitor].skipped():
                    print("    Monitor {0} was ok (skipped)".format(
                        this_monitor))
                else:
                    tail_info.append(
                        "    Monitor {0} should have been skipped".format(
                            this_monitor))
                    ok = False
            else:
                if m.monitors[this_monitor].error_count > 0:
                    tail_info.append(
                        "    Monitor {0} failed and shouldn't have: {1}".
                        format(this_monitor,
                               m.monitors[this_monitor].last_result))
                    ok = False
                    tail_info.append("        {}".format(
                        m.monitors[this_monitor].last_result))
                else:
                    print("    Monitor {0} was ok".format(this_monitor))
        if len(tail_info):
            print()
            for line in tail_info:
                print(line)
        if not ok:
            print(
                "Not all non-'fail' succeeded, or not all 'fail' monitors failed."
            )
            sys.exit(1)

    logging.shutdown()
Example #8
def main():
    """Entry point that parses the argument, and invokes the proper functions."""

    parser = argparse.ArgumentParser(
        description="List and create emulator docker containers ({}).".format(
            emu.__version__),
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument("-v",
                        "--verbose",
                        dest="verbose",
                        action="store_true",
                        help="Set verbose logging")

    subparsers = parser.add_subparsers()

    list_parser = subparsers.add_parser(
        "list",
        help=
        "list all the available the publicly available emulators and system images."
    )

    list_parser.add_argument(
        "--arm",
        action="store_true",
        help=
        "Display arm images. Note that arm images are not hardware accelerated and are *extremely* slow.",
    )
    list_parser.set_defaults(func=list_images)

    license_parser = subparsers.add_parser(
        "licenses",
        help=
        "Lists all licenses and gives you a chance to accept or reject them.")
    license_parser.add_argument(
        "--accept",
        action="store_true",
        help="Accept all licensens after displaying them.")
    license_parser.set_defaults(func=accept_licenses)

    create_parser = subparsers.add_parser(
        "create",
        help="Given an emulator and system image zip file, "
        "generates a Docker image comprising complete environment in which the Android Emulator runs. "
        "After the Docker image is started up, interaction with the emulator is made possible via port forwarding and ADB, "
        "or gRPC and WebRTC.",
    )
    create_parser.add_argument(
        "emuzip",
        help=
        "Zipfile containing the a publicly released emulator, or (canary|stable|[0-9]+) to use the latest canary, stable, or build id of the emulator to use. "
        "Keep in mind that using a build id can result in downloading an untested pre-release emulator build from the android ci server.",
    )
    create_parser.add_argument(
        "imgzip",
        help=
        "Zipfile containing a public system image that should be launched, or a regexp matching the image to retrieve. "
        "All the matching images will be selected when using a regex. "
        'Use the list command to show all available images. For example "P google_apis_playstore x86_64".',
    )
    create_parser.add_argument(
        "--extra",
        default="",
        help=
        "Series of additional commands to pass on to the emulator. This *MUST* be the last parameter. "
        "For example: --extra -http-proxy http://example.google.com",
        nargs=argparse.REMAINDER,
    )
    create_parser.add_argument(
        "--dest",
        default=os.path.join(os.getcwd(), "src"),
        help="Destination for the generated docker files")
    create_parser.add_argument(
        "--tag",
        default="",
        help="Docker tag, defaults to the emulator build id")
    create_parser.add_argument(
        "--repo",
        default="",
        help="Repo prefix, for example: us.gcr.io/emu-dev/")
    create_parser.add_argument(
        "--push",
        action="store_true",
        help=
        "Push the created image to your repository, as marked by the --repo argument.",
    )
    create_parser.add_argument(
        "--gpu",
        action="store_true",
        help="Build an image with gpu drivers, providing hardware acceleration"
    )

    create_parser.add_argument(
        "--metrics",
        action="store_true",
        help=
        "When enabled, the emulator will send usage metrics to Google when the container exists gracefully.",
    )
    create_parser.add_argument(
        "--no-metrics",
        action="store_true",
        help="Disables the collection of usage metrics.")
    create_parser.add_argument(
        "--start",
        action="store_true",
        help="Starts the container after creating it. "
        "All exposed ports are forwarded, and your private adbkey (if available) is injected but not stored.",
    )
    create_parser.set_defaults(func=create_docker_image)

    create_inter = subparsers.add_parser(
        "interactive",
        help=
        "Interactively select which system image and emulator binary to use when creating a docker container",
    )
    create_inter.add_argument(
        "--extra",
        default="",
        help="Series of additional commands to pass on to the emulator. "
        'For example -turncfg \\"curl -s -X POST https://networktraversal.googleapis.com/v1alpha/iceconfig?key=MySec\\"',
    )
    create_inter.add_argument(
        "--dest",
        default=os.path.join(os.getcwd(), "src"),
        help="Destination for the generated docker files")
    create_inter.add_argument(
        "--gpu",
        action="store_true",
        help="Build an image with gpu drivers, providing hardware acceleration"
    )
    create_inter.add_argument(
        "--start",
        action="store_true",
        help="Starts the container after creating it. "
        "All exposed ports are forwarded, and your private adbkey (if available) is injected but not stored.",
    )
    create_inter.add_argument(
        "--arm",
        action="store_true",
        help=
        "Display arm images. Note that arm images are not hardware accelerated and are *extremely* slow.",
    )
    create_inter.set_defaults(func=create_docker_image_interactive)

    dist_parser = subparsers.add_parser(
        "cloud-build",
        help=
        "Create a cloud builder distribution. This will create a distribution for publishing container images to a GCE repository."
        "This is likely only useful if you are within Google.",
    )
    dist_parser.add_argument(
        "--repo",
        default="",
        help="Repo prefix, for example: us.gcr.io/emu-dev/")
    dist_parser.add_argument("--dest",
                             default=os.path.join(os.getcwd(), "src"),
                             help="Destination for the generated docker files")
    dist_parser.add_argument(
        "--git",
        action="store_true",
        help="Create a git commit, and push to destination.")
    dist_parser.add_argument(
        "emuzip",
        help=
        "Zipfile containing the a publicly released emulator, or (canary|stable|[0-9]+) to use the latest canary, stable, or build id of the emulator to use. "
        "Keep in mind that using a build id can result in downloading an untested pre-release emulator build from the android ci server.",
    )
    dist_parser.add_argument(
        "img",
        default="P google_apis_playstore x86_64|Q google_apis_playstore x86_64",
        help="A regexp matching the image to retrieve. "
        "All the matching images will be selected when using a regex. "
        'Use the list command to show all available images. For example "P google_apis_playstore x86_64".',
    )
    dist_parser.set_defaults(func=create_cloud_build_distribuition)
    args = parser.parse_args()

    # Configure logger.
    lvl = logging.DEBUG if args.verbose else logging.WARNING
    handler = colorlog.StreamHandler()
    handler.setFormatter(
        colorlog.ColoredFormatter("%(log_color)s%(levelname)s:%(message)s"))
    logging.root = colorlog.getLogger("root")
    logging.root.addHandler(handler)
    logging.root.setLevel(lvl)

    if hasattr(args, "func"):
        args.func(args)
    else:
        parser.print_help()
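
The example above replaces logging.root with a colorlog logger. A more conventional equivalent (a sketch, not taken from the original) attaches the coloured handler to the existing root logger instead:

import logging

import colorlog

handler = colorlog.StreamHandler()
handler.setFormatter(
    colorlog.ColoredFormatter("%(log_color)s%(levelname)s:%(message)s"))

root = logging.getLogger()       # the ordinary root logger
root.addHandler(handler)
root.setLevel(logging.WARNING)   # or logging.DEBUG when --verbose is given

root.warning("colourised warning emitted through the root logger")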
Example #9
def main(args: List[str]):
    try:
        import colorlog
    except ImportError:
        basicConfig(
            format="%(asctime)s [%(levelname)s] %(message)s",
            datefmt="%H:%M:%S",
            level=getenv('LOG_LEVEL', 'INFO'),
        )
        logger.warning('Please install colorlog: pip3 install colorlog')
    else:
        handler = colorlog.StreamHandler()
        formatter = colorlog.ColoredFormatter(
            "%(log_color)s%(asctime)s [%(levelname)s] %(message)s",
            datefmt="%H:%M:%S",
            log_colors={
                'DEBUG': 'cyan',
                'INFO': 'white',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red,bg_white',
            })
        handler.setFormatter(formatter)
        basicConfig(level=getenv('LOG_LEVEL', 'INFO'), handlers=[handler])

    parser = argparse.ArgumentParser(description='Testcase Generator')
    parser.add_argument('toml', nargs='*', help='Toml File')
    parser.add_argument('-p',
                        '--problem',
                        nargs='*',
                        help='Generate problem',
                        default=[])

    parser.add_argument('--dev', action='store_true', help='Developer Mode')
    parser.add_argument('--test', action='store_true', help='CI Mode')
    parser.add_argument('--htmldir', help='Generate HTML', default=None)
    parser.add_argument('--compile-checker',
                        action='store_true',
                        help='Deprecated: Compile Checker')

    opts = parser.parse_args(args)

    if opts.dev and opts.test:
        raise ValueError('only one of --dev and --test can be used')

    if opts.compile_checker:
        logger.warning(
            '--compile-checker is deprecated. Checker is compiled in default')

    libdir = Path(__file__).parent
    problems = list()  # type: List[Problem]

    for tomlpath in opts.toml:
        tomlfile = toml.load(tomlpath)
        problems.append(Problem(libdir, Path(tomlpath).parent))

    for problem_name in opts.problem:
        problem_dir = find_problem_dir(libdir, problem_name)
        if problem_dir is None:
            raise ValueError('Cannot find problem: {}'.format(problem_name))
        problems.append(Problem(libdir, problem_dir))

    if len(problems) == 0:
        logger.warning('No problems')

    if opts.htmldir:
        logger.info('Make htmldir')
        Path(opts.htmldir).mkdir(exist_ok=True, parents=True)

    # suppress the annoying dialog appears when an application crashes on Windows
    if platform.uname().system == 'Windows':
        import ctypes
        SEM_NOGPFAULTERRORBOX = 2  # https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863(v=vs.85).aspx
        ctypes.windll.kernel32.SetErrorMode(SEM_NOGPFAULTERRORBOX)

    mode = Problem.Mode.DEFAULT
    if opts.dev:
        mode = Problem.Mode.DEV
    if opts.test:
        mode = Problem.Mode.TEST

    for problem in problems:
        problem.generate(mode, Path(opts.htmldir) if opts.htmldir else None)
Example #10
def main():
    def replace_first_line(src_filename, target_filename, replacement_line):
        f = open(src_filename)
        first_line, remainder = f.readline(), f.read()
        t = open(target_filename, "w")
        t.write(replacement_line + "\n")
        t.write(remainder)
        t.close()

    def allXml(fList):
        fileNames = []
        for dRoot in fList:
            for fPath in glob.glob(os.path.join(dRoot, "*.xml")):
                fileNames.append(fPath)

            dPath = [d for d in os.listdir(dRoot)]
            for subDir in dPath:
                subDirPath = os.path.join(dRoot, subDir)
                fStr = subDirPath + '**/*.xml'

                for fPath in glob.glob(fStr, recursive=True):
                    fileNames.append(fPath)

        return (fileNames)

    def addToDevel(dRoot):
        rootBsName = os.path.basename(os.path.normpath(dRoot))
        # XML files in first level
        for fPath in glob.glob(os.path.join(dRoot, "*.xml")):
            if fPath:
                fBsName = os.path.basename(fPath)
                includeUrl = '    <include url="' + rootBsName + '/' + fBsName + '" />\n'
                with open(develPath, "a") as myfile:
                    myfile.write(includeUrl)
                    # XML files in second level
        dPath = [d for d in os.listdir(dRoot)]
        for subDir in dPath:
            subDirPath = os.path.join(dRoot, subDir)
            fStr = subDirPath + '**/*.xml'
            for fPath in glob.glob(fStr, recursive=True):
                if fPath:
                    fBsName = os.path.basename(fPath)
                    includeUrl = '    <include url="' + rootBsName + '/' + subDir + '/' + fBsName + '" />\n'
                    with open(develPath, "a") as myfile:
                        myfile.write(includeUrl)

    # Script only for python3
    if sys.version_info[0] < 3:
        sys.exit('Must be using Python 3')

    # Add description
    parser = argparse.ArgumentParser(description='Description')
    # parser.add_argument('-o', '--origin', action=readable_dir, dest='origin', help='Parameter which autocomplete directories')
    # parser.add_argument('-d', '--destination', dest='destination', help='Simple paramenter')
    args = parser.parse_args()

    handler = colorlog.StreamHandler()
    handler.setFormatter(
        colorlog.ColoredFormatter('%(log_color)s%(levelname)s:%(message)s'))
    logger = colorlog.getLogger()
    logger.addHandler(handler)
    logger.setLevel(level=logging.INFO)

    logger.info("Started")

    # Check .src/ exists
    rootDir = os.getcwd()
    srcDir = os.path.dirname('.src/')
    templatesDir = os.path.join(srcDir, 'templates/')
    staticDir = os.path.join(srcDir, 'static/')
    sceneTemplate = os.path.join(templatesDir, 'scene_template.html')
    develTemplate = os.path.join(templatesDir, 'devel_template.html')
    contentTemplate = os.path.join(templatesDir, 'content_template.xml')
    includeStatic = os.path.join(staticDir, 'include/')
    pluginsStatic = os.path.join(staticDir, 'plugins/')
    jsStatic = os.path.join(staticDir, 'tour.js')
    swfStatic = os.path.join(staticDir, 'tour.swf')
    panosDir = os.path.join(srcDir, 'panos')
    if not os.path.exists(srcDir):
        sys.exit(
            logger.critical(
                "There is no '.src' directory. Are you in the right directory?"
            ))
        # Check .src/config.xml exists
    configXml = os.path.join(srcDir, 'config.xml')
    if not os.path.exists(configXml):
        sys.exit(logger.critical("Where is config.xml?"))
        # Parse .src/config.xml
    tree = ET.parse(configXml)
    root = tree.getroot()

    brandList = []
    for brand in tree.findall('brand'):
        brandID = brand.get('id')
        brandList.append(brandID)
        logging.debug(brandID)

    logging.debug(brandList)

    carList = []
    for car in tree.findall('brand/car'):
        carID = car.get('id')
        carList.append(carID)
        logging.debug(carID)

    logging.debug(carList)

    # For each tour
    for carTour in carList:

        logger.info('[ ] ' + carTour)

        # Check .src/panos/car/
        panoPath = os.path.join(panosDir, carTour)
        if not os.path.exists(panoPath):
            sys.exit(logger.critical('Pano "' + panoPath + '" NOT FOUND.'))

        # Check .src/panos/car/ has jpg files
        panorama = glob.glob(os.path.join(panoPath, "*.jpg"))
        if not panorama:
            sys.exit(
                logger.critical('Folder "' + panoPath +
                                '" doesn\'t contain any JPG files.'))

        # Check car/
        tourPath = os.path.join(carTour)
        if not os.path.exists(tourPath):
            sys.exit(
                logger.critical(
                    'Folder "' + tourPath +
                    '" NOT FOUND. Did you create the tiles correctly?'))

        # Check car/files/
        filesPath = os.path.join(carTour, 'files/')
        if not os.path.exists(filesPath):
            sys.exit(
                logger.critical(
                    'Folder "' + filesPath +
                    '" NOT FOUND. Did you create the tiles correctly?'))

        # Check car/files/scenes/
        scenesPath = os.path.join(carTour, 'files/scenes/')
        if not os.path.exists(scenesPath):
            sys.exit(
                logger.critical(
                    'Folder "' + scenesPath +
                    '" NOT FOUND. Did you create the tiles correctly?'))

        # Check car/files/scenes/tiles/
        for panoFile in panorama:
            panoFile = os.path.basename(panoFile)
            panoFile = os.path.splitext(panoFile)[0]
            tilesPath = os.path.join(scenesPath, panoFile)
            if not os.path.exists(tilesPath):
                sys.exit(
                    logger.critical(
                        'Folder "' + tilesPath +
                        '" NOT FOUND. Did you create the tiles correctly?'))

            # Check car/files/scenes/scene.xml
            tilesXml = panoFile + '.xml'
            tilesXmlPath = os.path.join(scenesPath, tilesXml)
            if not os.path.exists(tilesXmlPath):
                sys.exit(
                    logger.critical(
                        'Folder "' + tilesXmlPath +
                        '" NOT FOUND. Did you create the tiles correctly?'))

        # index.html
        tourIndex = os.path.join(tourPath, 'index.html')
        for file in os.scandir(tourPath):
            if file.name.endswith(".html"):
                os.unlink(file.path)
        shutil.copy(sceneTemplate, tourIndex)

        # devel/
        tourDevelPath = os.path.join(tourPath, 'devel/')
        if not os.path.exists(tourDevelPath):
            os.makedirs(tourDevelPath)

        # devel/index.html
        tourDevel = os.path.join(tourDevelPath, 'index.html')
        for file in os.scandir(tourDevelPath):
            if file.name.endswith(".html"):
                os.unlink(file.path)
        shutil.copy(develTemplate, tourDevel)

        # files/content/
        contentPath = os.path.join(filesPath, 'content/')
        if not os.path.exists(contentPath):
            os.makedirs(contentPath)
            # files/content/index.xml
        tourContent = os.path.join(contentPath, 'index.xml')
        if not os.path.exists(tourContent):
            shutil.copy(contentTemplate, tourContent)

        # files/content/*.jpg
        tourNumber = len(panorama)
        if tourNumber > 1:
            for panoFile in panorama:
                panoFile = os.path.basename(panoFile)
                panoFile = os.path.splitext(panoFile)[0]
                thumbs = panoFile + '.jpg'
                thumbsPath = os.path.join(contentPath, thumbs)
                # print(thumbsPath)
                if not os.path.exists(thumbsPath):
                    logger.warning('Thumbnail "' + thumbsPath + '" is missing.')

        # files/include/
        includePath = os.path.join(filesPath, 'include/')
        if os.path.exists(includePath):
            shutil.rmtree(includePath)
        shutil.copytree(includeStatic, includePath)

        # files/plugins/
        pluginsPath = os.path.join(filesPath, 'plugins/')
        if os.path.exists(pluginsPath):
            shutil.rmtree(pluginsPath)
        shutil.copytree(pluginsStatic, pluginsPath)

        # Edit first line of all scenes.xml files

        i = 1
        for xmlF in glob.glob(os.path.join(scenesPath, "*.xml")):
            if os.name == 'nt':
                destF = os.path.join('C:\\', 'Users', 'Rafael', 'AppData',
                                     'Local', 'Temp', 'tempfile')
            else:
                destF = os.path.join('/tmp', 'tempfile')
            sceneNo = 'scene' + str(i)
            newLine = '<scene name="' + sceneNo + '" thumburl="%FIRSTXML%/content/' + sceneNo + '.jpg" onstart="' + sceneNo + '();">'
            replace_first_line(xmlF, destF, newLine)
            shutil.copyfile(destF, xmlF)
            i = i + 1
            safeRm(destF)

        # files/tour.js
        jsPath = os.path.join(filesPath, 'tour.js')
        if os.path.exists(jsPath):
            os.remove(jsPath)
        shutil.copyfile(jsStatic, jsPath)

        # files/tour.swf
        swfPath = os.path.join(filesPath, 'tour.swf')
        if os.path.exists(swfPath):
            os.remove(swfPath)
        shutil.copyfile(swfStatic, swfPath)

        # files/devel.xml
        develPath = os.path.join(filesPath, 'devel.xml')
        if os.path.exists(develPath):
            os.remove(develPath)
            open(develPath, 'w+')
        with open(develPath, "a") as myfile:
            myfile.write(
                '<?xml version="1.0" encoding="UTF-8"?>\n<krpano version="1.19">\n'
            )

        scenesPath = os.path.join(filesPath, 'scenes/')
        addToDevel(pluginsPath)
        addToDevel(includePath)
        addToDevel(contentPath)
        addToDevel(scenesPath)

        with open(develPath, "a") as myfile:
            myfile.write('</krpano>')

        # files/tour.xml
        tourXml = os.path.join(filesPath, 'tour.xml')
        xmlList = [pluginsPath, includePath, contentPath, scenesPath]
        fileNms = allXml(xmlList)

        safeRm(tourXml)

        with open(tourXml, 'w') as outfile:
            outfile.write('<krpano version="1.19">\n')
            for fname in fileNms:
                with open(fname) as infile:
                    for line in infile:
                        if line.rstrip():
                            if '<krpano' not in line and '</krpano>' not in line:
                                outfile.write(line)
            outfile.write('</krpano>')

        #subprocess.call(['tidy', '-modify', '--hide-comments', 'yes', '-wrap', '0', '-quiet', '-xml', tourXml])

        # Find obsolete folders
        ignored = {".no_scenes", ".src", ".env"}
        currentTours = [
            d for d in os.listdir(rootDir)
            if (os.path.isdir(d) and d not in ignored)
        ]
        compareTours = set(currentTours).difference(carList)
        if compareTours:
            for obsoleteTour in compareTours:
                logger.critical("The following folder is obsolete: " +
                                obsoleteTour)
            sys.exit()

    logger.info('EOF')
Example #11
def get_app(config, _app=None, with_external_mods=True, url_prefix="/api"):
    # Make sure app is a singleton
    if _app is not None:
        return _app

    app = Flask(__name__)
    app.config.update(config)

    if app.config["DEBUG"]:
        from flask.logging import default_handler
        import colorlog

        handler = colorlog.StreamHandler()
        handler.setFormatter(
            colorlog.ColoredFormatter(
                """%(log_color)s%(asctime)s %(levelname)s:%(name)s:%(message)s [in %(pathname)s:%(lineno)d]"""
            ))

        logger = logging.getLogger('werkzeug')
        logger.addHandler(handler)
        app.logger.removeHandler(default_handler)

        for l in logging.Logger.manager.loggerDict.values():
            if hasattr(l, 'handlers'):
                l.handlers = [handler]

    # else:
    #     # TODO: sourced from app.config['LOGGING']
    #     logging.basicConfig()
    #     logger = logging.getLogger()
    #     logger.setLevel(logging.INFO)
    logging.getLogger("sqlalchemy.engine").setLevel(
        getattr(sys.modules['logging'], app.config["SQLALCHEMY_DEBUG_LEVEL"]))

    CORS(app, supports_credentials=True)
    # app.config['PROPAGATE_EXCEPTIONS'] = False
    # ... brings back those cors headers on error response in debug mode
    # to trace client-side error handling
    # but drops the embedded debugger ¯\_(ツ)_/¯
    # https://github.com/corydolphin/flask-cors/issues/67
    # https://stackoverflow.com/questions/29825235/getting-cors-headers-in-a-flask-500-error

    # Bind app to DB
    db.init_app(app)

    # JWT Auth
    jwt.init_app(app)

    # Swagger for api documentation
    swagger.init_app(app)

    admin.init_app(app)

    ckeditor.init_app(app)

    with app.app_context():

        from gncitizen.core.users.routes import routes

        app.register_blueprint(routes, url_prefix=url_prefix)

        from gncitizen.core.commons.routes import routes

        app.register_blueprint(routes, url_prefix=url_prefix)

        from gncitizen.core.observations.routes import routes

        app.register_blueprint(routes, url_prefix=url_prefix)

        from gncitizen.core.ref_geo.routes import routes

        app.register_blueprint(routes, url_prefix=url_prefix)

        from gncitizen.core.badges.routes import routes

        app.register_blueprint(routes, url_prefix=url_prefix)

        from gncitizen.core.taxonomy.routes import routes

        app.register_blueprint(routes, url_prefix=url_prefix)

        # Load third-party modules
        if with_external_mods:
            for conf, manifest, module in list_and_import_gnc_modules(app):
                try:
                    prefix = url_prefix + conf["api_url"]
                except Exception as e:
                    current_app.logger.debug(e)
                    prefix = url_prefix
                print(prefix)
                app.register_blueprint(module.backend.blueprint.blueprint,
                                       url_prefix=prefix)
                try:
                    module.backend.models.create_schema(db)
                except Exception as e:
                    current_app.logger.debug(e)
                # Load the module's configuration
                # into blueprint.config
                module.backend.blueprint.blueprint.config = conf
                app.config[manifest["module_name"]] = conf

        _app = app

        create_schemas(db)
        db.create_all()

    return app
Example #12
def autolog(level=DEFAULT_LOG_LEVEL,
            name=None,
            path=None,
            log_on_crash=True,
            log_filename=True,
            color_log=DEFAULT_COLOR_LOG,
            _cache={}):
    if not name:
        try:
            name = Path(sys.argv[0]).absolute().with_suffix('').name
        except IndexError:
            pass

    if name in _cache:
        return _cache[name]

    logger = logging.getLogger(name)

    filelogger = logging.getLogger('__fileonly__')

    logger.setLevel(level)

    log_file = path or Path(temp_dir(name)) / "auto.log"

    formatter = logging.Formatter(
        '%(asctime)s :: %(levelname)s :: %(pathname)s:%(lineno)s :: %(message)s'
    )
    file_handler = RotatingFileHandler(str(log_file), 'a', 1000000, 1)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    filelogger.addHandler(file_handler)

    if color_log:
        stream_handler = colorlog.StreamHandler()
        colored_formatter = colorlog.ColoredFormatter(
            '%(log_color)s%(message)s',
            log_colors={
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red,bg_white'
            })
        stream_handler.setFormatter(colored_formatter)
    else:
        stream_handler = logging.StreamHandler()
    logger.addHandler(stream_handler)

    previous_hook = sys.excepthook

    def on_crash(type, value, tb):
        filelogger.critical("The program crashed on:",
                            exc_info=(type, value, tb))
        previous_hook(type, value, tb)

    if log_on_crash:
        sys.excepthook = on_crash

    if log_filename:
        logger.info('Starting to log in "{}"'.format(log_file))

    _cache[name] = logger

    return logger
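
A hypothetical call site for autolog (it assumes DEFAULT_LOG_LEVEL, DEFAULT_COLOR_LOG and the temp_dir helper referenced by the snippet are defined elsewhere in the module):

log = autolog()    # cached per name, so repeated calls return the same logger object
log.warning('appears in yellow on the console and in the rotating auto.log file')
log.error('uncaught exceptions are also written to the file via the installed sys.excepthook')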
Example #13
import logging

import colorlog

logger = logging.getLogger()
logger.setLevel(logging.WARNING)

_sh = colorlog.StreamHandler()
_sh.setLevel(logging.DEBUG)

_color_formatter = colorlog.ColoredFormatter(
    # fmt="%(log_color)s%(levelname)-8s%(reset)s %(log_color)s%(message)s",
    fmt="%(log_color)s%(message)s",
    datefmt=None,
    reset=True,
    log_colors={
        "DEBUG": "green",
        "INFO": "cyan",
        "WARNING": "yellow",
        "ERROR": "red",
        "CRITICAL": "red,bg_yellow",
    },
    secondary_log_colors={},
    style="%",
)

_sh.setFormatter(_color_formatter)
logger.addHandler(_sh)
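
Usage note for the module-level setup above: the handler level is DEBUG but the logger level is WARNING, so DEBUG and INFO records are dropped at the logger before they reach the handler. A small sketch, assuming the snippet lives in a module named log_setup (the module name is an assumption):

import logging

from log_setup import logger      # hypothetical module name for the snippet above

logger.info("dropped: the logger level is WARNING")
logger.warning("printed in yellow")
logger.setLevel(logging.DEBUG)    # lower the logger level to let DEBUG and INFO through
logger.debug("now printed in green")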
Example #14
def main():
    # Check for Python version
    if sys.version_info < (3, 5):
        print("Jetconf requires Python version 3.5 or higher")
        sys.exit(1)

    # Get Jetconf version
    try:
        jetconf_version = get_distribution("jetconf").version
    except DistributionNotFound:
        jetconf_version = "(not found)"

    # Parse command line arguments
    config_file = "config.yaml"

    try:
        opts, args = getopt.getopt(sys.argv[1:], "c:vh")
    except getopt.GetoptError:
        print("Error: invalid argument detected.")
        print_help()
        sys.exit(1)

    for opt, arg in opts:
        if opt == "-c":
            config_file = arg
        elif opt == "-v":
            print("Jetconf version {}".format(jetconf_version))
            sys.exit(0)
        elif opt == "-h":
            print_help()
            sys.exit(0)

    # Load configuration
    jc_config = config.JcConfig()
    config.CFG = jc_config

    try:
        jc_config.load_file(config_file)
    except FileNotFoundError:
        print("Configuration file does not exist")
        sys.exit(1)
    except ValueError as e:
        print("Configuration syntax error: " + str(e))
        sys.exit(1)

    # Validate configuration
    try:
        jc_config.validate()
    except ValueError as e:
        print("Error: " + str(e))
        sys.exit(1)
    
    # Set logging level
    log_level = {
        "error": logging.ERROR,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.INFO
    }.get(jc_config.glob["LOG_LEVEL"], logging.INFO)
    logging.root.handlers.clear()

    # Daemonize
    if jc_config.glob["LOGFILE"] not in ("-", "stdout"):
        # Setup basic logging
        logging.basicConfig(
            format="%(asctime)s %(levelname)-8s %(message)s",
            level=log_level,
            filename=jc_config.glob["LOGFILE"]
        )

        # Go to background
        pid = os.fork()
        if pid != 0:
            sys.exit(0)
        os.setsid()
        os.umask(0)
        pid = os.fork()
        if pid != 0:
            sys.exit(0)

        # Close standard file descriptors
        os.close(sys.stdin.fileno())
        os.close(sys.stdout.fileno())
        os.close(sys.stderr.fileno())
        fd_null = os.open("/dev/null", os.O_RDWR)
        os.dup(fd_null)
        os.dup(fd_null)
    else:
        # Setup color logging
        log_formatter = colorlog.ColoredFormatter(
            "%(asctime)s %(log_color)s%(levelname)-8s%(reset)s %(message)s",
            datefmt=None,
            reset=True,
            log_colors={
                'DEBUG': 'cyan',
                'INFO': 'green',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red',
            },
            secondary_log_colors={},
            style='%'
        )

        log_handler = colorlog.StreamHandler()
        log_handler.setFormatter(log_formatter)
        log_handler.stream = sys.stdout

        logger = colorlog.getLogger()
        logger.addHandler(log_handler)
        logger.setLevel(log_level)

    # Print version
    info("Jetconf version {}".format(jetconf_version))

    # Print configuration
    jc_config.print()

    # Instantiate Jetconf main class
    jc = jetconf.Jetconf(jc_config)
    jetconf.JC = jc

    try:
        jc.init()
    except JetconfInitError as e:
        error(str(e))
        jc.cleanup()

        # Exit
        info("Exiting (error)")
        sys.exit(1)

    # Run Jetconf (this will block until shutdown)
    jc.run()

    jc.cleanup()

    # Exit
    info("Exiting")
    sys.exit(0)
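
A side note on the colour branch above (an observation, not code from the original): the target stream can be passed straight to the handler constructor instead of being assigned to log_handler.stream afterwards:

import sys

import colorlog

log_handler = colorlog.StreamHandler(sys.stdout)   # equivalent to setting log_handler.stream = sys.stdout
log_handler.setFormatter(colorlog.ColoredFormatter(
    "%(asctime)s %(log_color)s%(levelname)-8s%(reset)s %(message)s"))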
Example #15
def main():
    config_file = "config.yaml"

    # Parse command line arguments
    try:
        opts, args = getopt.getopt(sys.argv[1:], "c:")
    except getopt.GetoptError:
        print("Invalid argument detected. Possible options are: -c (config file)")
        sys.exit(1)

    for opt, arg in opts:
        if opt == "-c":
            config_file = arg

    # Load configuration
    try:
        load_config(config_file)
    except FileNotFoundError:
        print("Configuration file does not exist")
        sys.exit(1)
    except ParserError as e:
        print("Configuration syntax error: " + str(e))
        sys.exit(1)

    # Set logging level
    log_level = {
        "error": logging.ERROR,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.INFO
    }.get(CONFIG_GLOBAL["LOG_LEVEL"], logging.INFO)
    logging.root.handlers.clear()

    # Daemonize
    if CONFIG_GLOBAL["LOGFILE"] not in ("-", "stdout"):
        # Setup basic logging
        logging.basicConfig(
            format="%(asctime)s %(levelname)-8s %(message)s",
            level=log_level,
            filename=CONFIG_GLOBAL["LOGFILE"]
        )

        # Go to background
        pid = os.fork()
        if pid != 0:
            sys.exit(0)
        os.setsid()
        os.umask(0)
        pid = os.fork()
        if pid != 0:
            sys.exit(0)

        # Close standard file descriptors
        os.close(sys.stdin.fileno())
        os.close(sys.stdout.fileno())
        os.close(sys.stderr.fileno())
        fd_null = os.open("/dev/null", os.O_RDWR)
        os.dup(fd_null)
        os.dup(fd_null)
    else:
        # Setup color logging
        log_formatter = colorlog.ColoredFormatter(
            "%(asctime)s %(log_color)s%(levelname)-8s%(reset)s %(message)s",
            datefmt=None,
            reset=True,
            log_colors={
                'DEBUG': 'cyan',
                'INFO': 'green',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red',
            },
            secondary_log_colors={},
            style='%'
        )

        log_handler = colorlog.StreamHandler()
        log_handler.setFormatter(log_formatter)
        log_handler.stream = sys.stdout

        logger = colorlog.getLogger()
        logger.addHandler(log_handler)
        logger.setLevel(log_level)

    # Print configuration
    print_config()

    # Create pidfile
    fl = os.open(CONFIG_GLOBAL["PIDFILE"], os.O_WRONLY + os.O_CREAT, 0o666)
    try:
        os.lockf(fl, os.F_TLOCK, 0)
        os.write(fl, str(os.getpid()).encode())
        os.fsync(fl)
    except BlockingIOError:
        error("Jetconf daemon already running (pidfile exists). Exiting.")
        sys.exit(1)

    # Set signal handlers
    def sig_exit_handler(signum, frame):
        os.close(fl)
        os.unlink(CONFIG_GLOBAL["PIDFILE"])
        info("Exiting.")
        sys.exit(0)

    signal.signal(signal.SIGTERM, sig_exit_handler)
    signal.signal(signal.SIGINT, sig_exit_handler)

    # Load data model
    yang_lib_file = os.path.join(CONFIG_GLOBAL["YANG_LIB_DIR"], "yang-library-data.json")
    datamodel = DataHelpers.load_data_model(
        CONFIG_GLOBAL["YANG_LIB_DIR"],
        yang_lib_file
    )

    # Datastore init
    datastore = JsonDatastore(datamodel, CONFIG_GLOBAL["DATA_JSON_FILE"], "DNS data", with_nacm=False)
    try:
        datastore.load()
        #datastore.load_yl_data(yang_lib_file)
    except (FileNotFoundError, YangsonException) as e:
        error("Could not load JSON datastore " + CONFIG_GLOBAL["DATA_JSON_FILE"])
        error(ErrorHelpers.epretty(e))
        sig_exit_handler(0, None)

    try:
        datastore.get_data_root().validate(ValidationScope.all, ContentType.config)
    except (SchemaError, SemanticError) as e:
        error("Initial validation of datastore failed")
        error(ErrorHelpers.epretty(e))
        sig_exit_handler(0, None)

    # Register configuration data node listeners
    # TODO

    # Register op handlers
    # TODO

    # Create and register state data node listeners
    usr_state_data_handlers.create_zone_state_handlers(STATE_DATA_HANDLES, datamodel)

    # datastore callbacks TODO
    datastore.commit_begin_callback = tsn_connect
    datastore.commit_end_callback = tsn_disconnect

    # Create HTTP server
    rest_srv = RestServer()
    rest_srv.register_api_handlers(datastore)
    rest_srv.register_static_handlers()

    # Run HTTP server
    rest_srv.run()
示例#16
0
class LoggerFactory:
    logColors = {
        'DEBUG': 'white',
        'INFO': 'white',
        'WARNING': 'yellow',
        'ERROR': 'red',
        'CRITICAL': 'red',
    }

    # Read the log.properties configuration file
    prop = properties.parse('../../resources/log.properties')
    config = {}
    for key, value in prop.properties.items():
        if key.find('.') > 0:
            keys = key.split('.')
            if keys[0] == 'appender' and len(keys) > 1:
                if len(keys) == 3:
                    if keys[0] not in config:
                        config[keys[0]] = {}
                    if keys[1] not in config[keys[0]]:
                        config[keys[0]][keys[1]] = {}
                    config[keys[0]][keys[1]][keys[2]] = value
            if keys[0] == 'logger' and len(keys) > 1:
                if keys[0] not in config:
                    config[keys[0]] = {}
                config[keys[0]][key[len(keys[0]) + 1:]] = value
        else:
            config[key] = value

    # Initialise the root logger
    defaltLevel = logging.INFO
    if 'rootLogger' in config and config['rootLogger'].upper() in logging._nameToLevel:
        defaltLevel = logging._nameToLevel[config['rootLogger'].upper()]

    rootLogger = logging.getLogger()
    rootLogger.setLevel(defaltLevel)
    for key, handlerCfg in config['appender'].items():
        level = logging.INFO
        if 'level' in handlerCfg and handlerCfg['level'].upper() in logging._nameToLevel:
            level = logging._nameToLevel[handlerCfg['level'].upper()]

        file = './log.txt'
        if 'file' in handlerCfg:
            file = handlerCfg['file']

        maxBytes = 1024 * 1  # default maximum file size in bytes
        if 'maxBytes' in handlerCfg:
            maxBytes = int(handlerCfg['maxBytes'])

        backups = 5  # default number of backup log files to keep
        if 'backups' in handlerCfg:
            backups = int(handlerCfg['backups'])

        formatter = logging.BASIC_FORMAT
        if 'formatter' in handlerCfg:
            formatter = handlerCfg['formatter']

        if handlerCfg['type'] == 'file':
            formatter = logging.Formatter(formatter)

            # A size-based RotatingFileHandler could be used instead for rolling backups:
            # handler = RotatingFileHandler(filename = file,
            #                               mode = 'a',
            #                               encoding = 'utf-8',
            #                               maxBytes = maxBytes,
            #                               backupCount = backups)

            handler = LocalTimedRotatingFileHandler(filename=file,
                                                    when="MIDNIGHT",
                                                    interval=1,
                                                    backupCount=backups,
                                                    encoding='UTF-8')

            handler.set_name(key)
            handler.setLevel(level)
            handler.setFormatter(formatter)
            rootLogger.addHandler(handler)

        elif handlerCfg['type'] == 'console':
            formatter = colorlog.ColoredFormatter('%(log_color)s' + formatter,
                                                  log_colors=logColors)
            handler = colorlog.StreamHandler()
            handler.set_name(key)
            handler.setLevel(level)
            handler.setFormatter(formatter)
            rootLogger.addHandler(handler)

    pkgLevel = {}
    if 'logger' in config:
        pkgLevel = config['logger']

    @staticmethod
    def getLogger(klass=None):
        _name = None
        if klass is None:
            pass
        elif isinstance(klass, str):
            _name = klass
        else:
            # Any other object: use its class name as the logger name
            _name = type(klass).__name__

        logger = logging.getLogger(_name)

        if _name:
            level = LoggerFactory.defaltLevel
            lenth = len(_name)
            levelName = None
            for path in LoggerFactory.pkgLevel:
                if _name.startswith(path):
                    if path == _name:
                        levelName = LoggerFactory.pkgLevel[path]
                        break
                    elif _name.startswith(path + '.') and len(path) < lenth:
                        lenth = len(path)
                        levelName = LoggerFactory.pkgLevel[path]

            if levelName and levelName.upper() in logging._nameToLevel:
                level = logging._nameToLevel[levelName.upper()]
            logger.setLevel(level)

        return logger
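
A brief usage sketch for the factory above, assuming the log.properties file exists at the configured path; the logger and class names below are illustrative:

# Obtain a logger by explicit name (the dotted name is illustrative):
log = LoggerFactory.getLogger('com.example.service')
log.info('service started')

# Or derive the logger name from an instance's class (resolves to 'OrderService'):
class OrderService:
    def __init__(self):
        self.log = LoggerFactory.getLogger(self)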
示例#17
0
def main():
    parser = argparse.ArgumentParser(
        description='Merges all the XML files into "tour.xml"')
    parser.add_argument('-a', action='store_true', dest='alltractors', \
                              default=False, help='Create tour.xml files for all tractors')

    args = parser.parse_args()

    handler = colorlog.StreamHandler()
    handler.setFormatter(
        colorlog.ColoredFormatter('%(log_color)s%(levelname)s:%(message)s',
                                  log_colors={
                                      'DEBUG': 'green',
                                      'INFO': 'cyan'
                                  }))
    logger = colorlog.getLogger()
    logger.addHandler(handler)
    logger.setLevel(level=logging.INFO)

    logger.info("Started")
    alltours = []
    bad_words = ['<krpano', '</krpano>', '<krpano version', 'coordfinder']
    bad_folders = ['shared', '.DS_Store', 'files']
    bad_files = ['coordfinder', 'editor_and_options']
    shared_dir_here = os.path.join(os.getcwd(), "shared")
    shared_dir_prev = os.path.join(
        os.path.abspath(os.path.join(os.getcwd(), os.pardir)), "shared")
    if os.path.exists(shared_dir_here):
        shared_dir = shared_dir_here
        all_tractors = True
    else:
        if os.path.exists(shared_dir_prev):
            shared_dir = shared_dir_prev
            all_tractors = False
        else:
            sys.exit('You are not in the right directory')

    # shared_dir_abs = os.path.join(os.path.expanduser("~"), "virtual-tours", "tractor-media", "shared")
    plugins_dir = os.path.join(shared_dir, "plugins")
    include_dir = ''.join(glob.glob(os.path.join(shared_dir, "include")))

    if all_tractors:
        for brand in os.listdir(os.getcwd()):
            if (brand not in bad_folders):
                for tour in os.listdir(os.path.join(os.getcwd(), brand)):
                    tour_path = os.path.join(os.getcwd(), brand, tour)
                    if os.path.isdir(tour_path):
                        if not tour.startswith('.'):
                            tour = os.path.join(os.getcwd(), brand, tour)
                            alltours.append(tour)
    else:
        for tour in os.listdir(os.getcwd()):
            if os.path.isdir(os.path.join(os.getcwd(), tour)):
                if not tour.startswith('.'):
                    if not any(bad_folder in tour
                               for bad_folder in bad_folders):
                        alltours.append(tour)

    alltours.sort(reverse=False)

    for tour in alltours:
        logger.info("Tour: " + os.path.basename(tour))
        allxmlfiles = []
        # XML files inside content/ folder
        contentxmlfiles = sorted(
            glob.glob(tour + "/files/content/*.xml", recursive=True))
        for item in contentxmlfiles:
            allxmlfiles.append(item)
            logger.info('[ -- ] ' + os.path.relpath(item, os.getcwd()))

        # XML files inside plugins/ folder
        if os.path.exists(shared_dir):
            pluginsxmlfiles = sorted(
                glob.glob(plugins_dir + "/*.xml", recursive=True))
        else:
            pluginsxmlfiles = sorted(
                glob.glob(tour + "/files/plugins/*.xml", recursive=True))
        for item in pluginsxmlfiles:
            allxmlfiles.append(item)
            logger.info('[ -- ] ' + os.path.relpath(item, os.getcwd()))

        # XML files inside include/ folder
        if (os.path.exists(shared_dir)) and (os.path.exists(include_dir)):
            sharedxmlfiles = sorted(
                glob.glob(include_dir + "/**/*.xml", recursive=True))
        else:
            sharedxmlfiles = sorted(
                glob.glob(tour + "/files/include/**/*.xml", recursive=True))
        for item in sharedxmlfiles:
            if not any(bad_file in item for bad_file in bad_files):
                allxmlfiles.append(item)
                logger.info('[ -- ] ' + os.path.relpath(item, os.getcwd()))

        # XML files inside scenes/ folder
        scenesxmlfiles = sorted(glob.glob(tour + "/files/scenes/*.xml",
                                          recursive=True),
                                key=numericalSort)
        for item in scenesxmlfiles:
            allxmlfiles.append(item)
            logger.info('[ -- ] ' + os.path.relpath(item, os.getcwd()))

        # Merge files into tour.xml
        tourxml = os.path.join(tour, 'files', 'tour.xml')
        safeRm(tourxml)
        with open(tourxml, 'w', encoding='utf-8') as outfile:
            outfile.writelines(
                '<?xml version="1.0" encoding="UTF-8"?>\n<krpano version="1.19">\n'
            )
            for line in fileinput.input(
                    allxmlfiles,
                    mode="rU",
                    openhook=fileinput.hook_encoded("utf-8-sig")):
                if not any(bad_word in line for bad_word in bad_words):
                    if line.rstrip():
                        outfile.write(line)
            outfile.writelines("</krpano>")
        logger.info('[ OK ] ' + tourxml)
        allxmlfiles = []

    logger.info("_EOF_")
示例#18
0
def configure_module_logger(
    logger,
    logger_name,
    log_level,
    debug,
    log_timestamp,
    log_file,
):
    # Avoid continually setting up a new logger on every new web request.
    if hasattr(logger, 'initialised') and logger.initialised:
        return
    # Validate input.
    v.validate_string(logger_name, 'logger_name', 'configure_module_logger')
    v.validate_string(log_level, 'log_level', 'configure_module_logger')
    v.validate_boolean(debug, 'debug', 'configure_module_logger')
    if log_timestamp is not None:
        v.validate_boolean(log_timestamp, 'log_timestamp',
                           'configure_module_logger')
    if log_file is not None:
        v.validate_string(log_file, 'log_file', 'configure_module_logger')
    # Configure logger.
    logger.propagate = False
    level_str = log_level
    level_str = 'debug' if debug else level_str
    levels = {
        'error': logging.ERROR,
        'warning': logging.WARNING,
        'info': logging.INFO,
        'debug': logging.DEBUG,
    }
    level = levels[level_str]
    logger.setLevel(level)
    logger.level_str = level_str

    # Add a convenience method.
    # Use camelCase to match logging module convention.

    def setLevelStr(level_str):
        level = levels[level_str]
        logger.setLevel(level)
        logger.level_str = level_str

    logger.setLevelStr = setLevelStr
    # Construct log_format.
    # Example log_format:
    # '%(asctime)s %(levelname)-8s [%(name)s: %(lineno)s (%(funcName)s)] %(message)s'
    # Example logLine:
    # 2020-11-19 13:14:10 DEBUG    [demo1.basic: 19 (hello)] Entered into basic.hello.
    log_format = '[' + logger_name + ': %(lineno)s (%(funcName)s)] %(message)s'
    # Note: In "%(levelname)-8s", the '8' pads the levelname length with spaces up to 8 characters, and the hyphen left-aligns the levelname.
    log_format = '%(levelname)-8s ' + log_format
    if log_timestamp:
        log_format = '%(asctime)s ' + log_format
    log_formatter = logging.Formatter(fmt=log_format,
                                      datefmt='%Y-%m-%d %H:%M:%S')
    log_formatter2 = None
    if colorlog_imported:
        log_format_color = log_format.replace('%(levelname)',
                                              '%(log_color)s%(levelname)')
        log_format_color = log_format_color.replace(
            '%(message)', '%(message_log_color)s%(message)')
        # Example log_format_color:
        # '%(asctime)s %(log_color)s%(levelname)-8s [%(name)s: %(lineno)s (%(funcName)s)] %(message_log_color)s%(message)s'
        log_formatter2 = colorlog.ColoredFormatter(
            log_format_color,
            datefmt='%Y-%m-%d %H:%M:%S',
            reset=True,  # Clear all formatting (both foreground and background colors).
            # log_colors controls the base text color for particular log levels.
            # A second comma-separated value, if provided, controls the background color.
            log_colors={
                'DEBUG': 'blue',
                'INFO': 'green',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red,bg_white',
            },
            # secondary_log_colors controls the value of message_log_color.
            # If a level is commented out, the message text will have the base text color (which is set in log_colors).
            secondary_log_colors={
                'message': {
                    'DEBUG': 'blue',
                    # 'INFO': 'white',
                    'WARNING': 'white',
                    'ERROR': 'white',
                    'CRITICAL': 'white',
                }
            },
        )
    # Set up console handler.
    if not colorlog_imported:
        # 1) Standard console handler:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(level)
        console_handler.setFormatter(log_formatter)
        logger.addHandler(console_handler)
    else:
        # 2) Colored console handler:
        console_handler2 = colorlog.StreamHandler()
        console_handler2.setLevel(level)
        console_handler2.setFormatter(log_formatter2)
        logger.addHandler(console_handler2)
    # Set up file handler.
    if log_file:
        # Create log_file directory if it doesn't exist.
        log_dir = os.path.dirname(log_file)
        if log_dir != '':
            if not os.path.exists(log_dir):
                os.makedirs(log_dir)
        # Note: If log file already exists, new log lines will be appended to it.
        file_handler = logging.FileHandler(log_file, mode='a', delay=True)
        # If delay is true, then file opening is deferred until the first call to emit().
        file_handler.setLevel(level)
        # The colorlog formatter's ANSI escape codes display correctly in 'cat' and 'tail' (but not in vim).
        # 'less' can display them with the -R flag.
        # To display in vim, strip the escape chars: $ sed 's|\x1b\[[;0-9]*m||g' somefile | vim -
        if not colorlog_imported:
            file_handler.setFormatter(log_formatter)
        else:
            file_handler.setFormatter(log_formatter2)
        logger.addHandler(file_handler)
    logger.initialised = True
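
A minimal call-site sketch for configure_module_logger, assuming a module-level logger; the level, path and timestamp values below are illustrative:

logger = logging.getLogger(__name__)

configure_module_logger(
    logger,
    logger_name=__name__,
    log_level='info',         # one of: error, warning, info, debug
    debug=False,              # True forces the debug level
    log_timestamp=True,       # prepend %(asctime)s to every line
    log_file='logs/app.log',  # illustrative path; None disables the file handler
)
logger.info('Logger configured.')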
示例#19
0
def main():
    """
    Parse command arguments, initialize log and start launch `run_server`

    :return:        0 in case of success, between 1 and 127 in case of failure
    :rtype:         int
    """

    # Initialise and parse command arguments
    parser = argparse.ArgumentParser(description="Api service")
    parser.add_argument('--redis-host', '-H', help="Redis-server host")
    parser.add_argument('--redis-port',
                        '-P',
                        help='Redis-server connection port')
    parser.add_argument('--redis-data-db',
                        '-i',
                        help="Redis-server database index for data")
    parser.add_argument('--redis-pubsub-db',
                        '-j',
                        help="Redis-server database index for events")
    parser.add_argument('--log-level', '-l', help="log level (ex: info)")
    parser.add_argument(
        '--log-output',
        help="log output: a file path, 'syslog', 'stderr' or 'stdout'")
    parser.add_argument('--reload',
                        '-r',
                        action='store_true',
                        help="Auto restart execution on code change")
    parser.add_argument('--pid-file', '-p', help="Pid file to set")

    args = parser.parse_args()

    # Load config
    conf = core.api_util.get_conf()
    api_name = conf.get('general', 'api_name')
    server_name = conf.get('general', "server")

    # Initialise logging
    if args.log_level:
        log_level = args.log_level.strip().upper()
    elif conf.has_section("log") and conf.has_option("log", "server_level"):
        log_level = conf.get("log", "server_level").strip().upper()
    else:
        log_level = "WARNING"
    log_level_int = logging.getLevelName(log_level)
    if not type_util.is_int(log_level_int):
        sys.stderr.write("Error: Invalid logging level " + repr(log_level) +
                         "\n")
        sys.stderr.flush()
        return 1
    logging.getLogger().setLevel(
        logging.INFO if log_level_int < logging.INFO else log_level_int)
    log.setLevel(log_level_int)

    if args.log_output:
        log_output = args.log_output.strip().lower()
    elif conf.has_section("log") and conf.has_option("log", "server_output"):
        log_output = conf.get("log", "server_output").strip().lower()
    else:
        log_output = "stderr"
    if log_output in ("stderr", "stdout"):
        log_file = sys.stderr if log_output == "stderr" else sys.stdout
        if log_file.isatty():
            use_color = not util.env_is_off("LOG_COLOR")
        else:
            use_color = util.env_is_on("LOG_COLOR")
        if use_color:
            log_format = "%(log_color)s%(levelname)-8s%(blue)s%(name)-16s%(reset)s %(white)s%(message)s"
            log_handler = colorlog.StreamHandler(stream=log_file)
            log_handler.setFormatter(colorlog.ColoredFormatter(log_format))
        else:
            log_handler = logging.StreamHandler(stream=log_file)
            log_handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
    elif log_output == "syslog":
        log_handler = logging.handlers.SysLogHandler(address='/dev/log')
        log_handler.setFormatter(
            logging.Formatter(
                '%(levelname)s %(module)s P%(process)d T%(thread)d %(message)s'
            ))
    else:
        log_handler = logging.FileHandler(log_output)
        log_handler.setFormatter(
            logging.Formatter(
                '%(asctime)s: %(levelname)-7s: %(name)s - %(message)s'))
    logging.getLogger().addHandler(log_handler)
    log.addHandler(log_handler)
    log.propagate = False

    # Get Redis config
    if args.redis_host:
        redis_host = args.redis_host.strip()
    elif conf.has_section("redis") and conf.has_option("redis", "host"):
        redis_host = conf.get("redis", "host").strip()
    else:
        redis_host = "localhost"
    if args.redis_port:
        redis_port = int(args.redis_port.strip())
    elif conf.has_section("redis") and conf.has_option("redis", "port"):
        redis_port = int(conf.get("redis", "port").strip())
    else:
        redis_port = 6379
    if args.redis_data_db:
        redis_data_db = int(args.redis_data_db.strip())
    elif conf.has_section("redis") and conf.has_option("redis", "data_db"):
        redis_data_db = int(conf.get("redis", "data_db").strip())
    else:
        redis_data_db = 0
    if args.redis_pubsub_db:
        redis_pubsub_db = int(args.redis_pubsub_db.strip())
    elif conf.has_section("redis") and conf.has_option("redis", "pubsub_db"):
        redis_pubsub_db = int(conf.get("redis", "pubsub_db").strip())
    else:
        redis_pubsub_db = 1

    if args.reload:
        auto_reload = True
    elif util.env_is_on("AUTO_RELOAD_CODE"):
        auto_reload = True
    else:
        auto_reload = False

    # Launch the main function
    try:
        run_server(api_name,
                   server_name,
                   redis_host,
                   redis_port,
                   redis_data_db,
                   redis_pubsub_db,
                   auto_reload=auto_reload,
                   log_level=log_level_int,
                   log_output=log_output,
                   pid_file=args.pid_file)
    except error_util.abort_errors:
        logging.getLogger("aziugo").info("Signal received, exiting")
        return 0
    except error_util.all_errors as e:
        logging.getLogger("aziugo").exception(str(e))
        return 2
    return 0
示例#20
0
__copyright__ = 'Copyright 2020, {project_name}'
__credits__ = ['{credit_list}']
__license__ = '{license}'
__version__ = get_version()
__maintainer__ = 'J. Agustin BARRACHINA'
__email__ = '[email protected]; [email protected]'
__status__ = '{dev_status}'

# logging.getLogger('tensorflow').disabled = True     # Removes https://github.com/tensorflow/tensorflow/issues/41557

STRING_FORMATTER = "%(asctime)s — %(levelname)s - %(module)s::%(funcName)s line %(lineno)s — %(message)s"

# file_handler = logging.FileHandler(create_folder("./log/logs/") / "logs.log")
# formatter = logging.Formatter(STRING_FORMATTER)
# file_handler.setFormatter(formatter)

# https://github.com/borntyping/python-colorlog
# https://stackoverflow.com/a/23964880/5931672
console_handler = colorlog.StreamHandler()
console_handler.setFormatter(colorlog.ColoredFormatter('%(log_color)s' + STRING_FORMATTER))

logger = colorlog.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(console_handler)
# logger.addHandler(file_handler)
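
A small sketch of what enabling the commented-out file handler could look like with only the standard library; ./log/logs/ mirrors the path in the comment above, and os.makedirs stands in for the create_folder helper that is not shown here:

import os

os.makedirs("./log/logs/", exist_ok=True)  # stand-in for the create_folder helper
file_handler = logging.FileHandler("./log/logs/logs.log")
file_handler.setFormatter(logging.Formatter(STRING_FORMATTER))
logger.addHandler(file_handler)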





示例#21
0
# Only set up the logger if it hasn't already been initialised before:
if not len(logger.handlers) > 0:
    logger.setLevel(logging.DEBUG)

    lfile_formatter = logging.Formatter(
        '[%(asctime)s] [%(levelname)s]\t%(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    lfile_path = os.path.abspath('/tmp/{}_elektronn2.log'.format(user_name))
    lfile_level = logging.DEBUG
    lfile_handler = logging.FileHandler(lfile_path)
    lfile_handler.setLevel(lfile_level)
    lfile_handler.setFormatter(lfile_formatter)
    logger.addHandler(lfile_handler)

    if colorize:
        lstream_handler = colorlog.StreamHandler(sys.stdout)
        lstream_handler.setFormatter(
            colorlog.LevelFormatter(fmt=log_level_formats,
                                    log_colors=log_colors))
    else:
        lstream_handler = logging.StreamHandler(sys.stdout)
    # set this to logging.DEBUG to enable output for logger.debug() calls
    lstream_level = logging.INFO
    lstream_handler.setLevel(lstream_level)
    logger.addHandler(lstream_handler)

    logger.propagate = False

    if False:  # Test log levels:
        logger.critical('== critical')
        logger.error('== error')
示例#22
0
def get_app(config, _app=None, url_prefix="/api"):
    # Make sure app is a singleton
    if _app is not None:
        return _app

    app = Flask(__name__)
    app.config.update(config)

    if app.config["DEBUG"]:
        # pylint: disable=import-outside-toplevel
        from flask.logging import default_handler
        import colorlog

        handler = colorlog.StreamHandler()
        handler.setFormatter(
            colorlog.ColoredFormatter(
                """%(log_color)s%(asctime)s %(levelname)s:%(name)s:%(message)s [%(pathname)s:%(lineno)d]"""  # noqa: E501
            ))

        logger = logging.getLogger("werkzeug")
        logger.addHandler(handler)
        app.logger.removeHandler(default_handler)

        for l in logging.Logger.manager.loggerDict.values():
            if hasattr(l, "handlers"):
                l.handlers = [handler]

    else:
        logging.getLogger("werkzeug").setLevel(logging.WARNING)
    #     # TODO: sourced from app.config['LOGGING']
    #     logging.basicConfig()
    #     logger = logging.getLogger()
    #     logger.setLevel(logging.INFO)
    logging.getLogger("sqlalchemy.engine").setLevel(
        getattr(sys.modules["logging"], app.config["SQLALCHEMY_DEBUG_LEVEL"]))

    CORS(app, supports_credentials=True)
    # app.config['PROPAGATE_EXCEPTIONS'] = False
    # ... brings back those cors headers on error response in debug mode
    # to trace client-side error handling
    # but drops the embedded debugger ¯\_(ツ)_/¯
    # https://github.com/corydolphin/flask-cors/issues/67
    # https://stackoverflow.com/questions/29825235/getting-cors-headers-in-a-flask-500-error

    # Bind app to DB
    db.init_app(app)

    # JWT Auth
    jwt.init_app(app)

    # Swagger for api documentation
    app.json_encoder = LazyJSONEncoder
    app.config["SWAGGER"].update({
        "termsOfService":
        "/".join([
            # TODO: sync config with frontend
            app.config["URL_APPLICATION"],
            # or "assets/cgu.pdf" for fr locale
            "/en/assets/termsOfUse.pdf",
        ]),
        "swaggerUiPrefix":
        LazyString(lambda: request.environ.get("HTTP_X_SCRIPT_NAME", "")),
    })
    swagger.init_app(app)

    admin.init_app(app)

    with app.app_context():
        # noqa: E501  pylint: disable=import-outside-toplevel
        from gncitizen.core.users.routes import routes as users_routes

        app.register_blueprint(users_routes, url_prefix=url_prefix)

        from gncitizen.core.commons.routes import routes as commons_routes

        app.register_blueprint(commons_routes, url_prefix=url_prefix)

        from gncitizen.core.observations.routes import routes as observations_routes

        app.register_blueprint(observations_routes, url_prefix=url_prefix)

        from gncitizen.core.ref_geo.routes import routes as geo_routes

        app.register_blueprint(geo_routes, url_prefix=url_prefix)

        from gncitizen.core.taxonomy.routes import routes as taxonomy_routes

        app.register_blueprint(taxonomy_routes, url_prefix=url_prefix)

        # Load third-party modules
        # if with_external_mods:
        for conf, manifest, module in list_and_import_gnc_modules(app):
            try:
                prefix = url_prefix + conf["api_url"]
            except Exception as e:
                current_app.logger.debug(e)
                prefix = url_prefix
            app.register_blueprint(module.backend.blueprint.blueprint,
                                   url_prefix=prefix)
            try:
                module.backend.models.create_schema(db)
            except Exception as e:
                current_app.logger.debug(e)

            # load the module's configuration
            # into blueprint.config
            module.backend.blueprint.blueprint.config = conf
            app.config[manifest["module_name"]] = conf

        _app = app

        create_schemas(db)
        db.create_all()

        cli.register(app)

        @app.shell_context_processor
        def make_shell_context():  # pylint: disable=unused-variable
            return {"db": db}

    return app
示例#23
0
文件: main.py 项目: tueda/polybench
def main(
    *,
    args: Optional[Sequence[str]] = None,
    stderr_color_hook: Optional[Callable[[bool], None]] = None,
) -> None:
    """Entry point."""
    # First, parse the arguments.

    if args is None:
        args = sys.argv[1:]

    defined_problem_types = get_problem_type_input_args()
    defined_exp_dists = get_exponents_distribution_args()

    parser = argparse.ArgumentParser(prog="polybench")
    parser.add_argument(
        "--type",
        default="nontrivial-gcd",
        choices=defined_problem_types,
        help="set the type of the problems:"
        " trivial-gcd [gcd(a*b,c*d)],"
        " nontrivial-gcd [gcd(a*g,b*g)],"
        " trivial-factor [factor(a*b+c)]"
        " or nontrivial-factor [factor(a*b)]"
        " (default: nontrivial-gcd)",
        metavar="TYPE",
    )
    parser.add_argument(
        "--nproblems",
        default=50,
        type=int,
        help="set the number of problems (default: 50)",
        metavar="N",
    )
    parser.add_argument(
        "--nwarmups",
        default=10,
        type=int,
        help="set the number of warm-up problems (default: 10)",
        metavar="N",
    )
    parser.add_argument(
        "--exp-dist",
        default="uniform",
        choices=defined_exp_dists,
        help="set the exponents distribution: uniform or sharp (default: uniform)",
        metavar="DIST",
    )
    parser.add_argument(
        "--nvars",
        default=5,
        type=int,
        help="set the number of variables (default: 5)",
        metavar="N",
    )
    parser.add_argument(
        "--min-nterms",
        default=None,
        type=int,
        help="set the minimum number of terms in a basic-block polynomial"
        " (default: max-nterms * 0.75)",
        metavar="N",
    )
    parser.add_argument(
        "--max-nterms",
        default=30,
        type=int,
        help="set the maximum number of terms in a basic-block polynomial"
        " (default: 30)",
        metavar="N",
    )
    parser.add_argument(
        "--min-degree",
        default=None,
        type=int,
        help="set the minimum degree of basic-block polynomials"
        " (default: max-degree * 0.75 for exp-dist=uniform, 0 for exp-dist=sharp)",
        metavar="N",
    )
    parser.add_argument(
        "--max-degree",
        default=30,
        type=int,
        help="set the maximum degree of basic-block polynomials (default: 30)",
        metavar="N",
    )
    parser.add_argument(
        "--min-coeff",
        default=None,
        type=int,
        help="set the minimum coefficient (default: - max_coeff)",
        metavar="N",
    )
    parser.add_argument(
        "--max-coeff",
        default=2 ** 14,
        type=int,
        help="set the maximum coefficient (default: 2^14)",
        metavar="N",
    )
    parser.add_argument(
        "--build-directory",
        default=None,
        type=str,
        help="set the build directory (default: build)",
        metavar="DIR",
    )
    parser.add_argument(
        "--output-directory",
        default=None,
        type=str,
        help="set the output directory (default: output)",
        metavar="DIR",
    )
    parser.add_argument(
        "--seed",
        default=42,
        type=int,
        help="set the random seed",
        metavar="N",
    )
    parser.add_argument(
        "--timeout",
        default=60 * 60,
        type=int,
        help="set the timeout in seconds (default: 1 hour)",
        metavar="N",
    )
    parser.add_argument(
        "--color",
        default="auto",
        choices=["auto", "always", "never"],
        help="specify whether to use color for the terminal output:"
        " auto, always or never (default: auto)",
        metavar="MODE",
    )
    parser.add_argument(
        "--build-only",
        action="store_true",
        help="build executables but skip actual benchmarks",
    )
    parser.add_argument(
        "--keep-temp",
        action="store_true",
        help="don't delete temporary files",
    )
    parser.add_argument(
        "--debug",
        action="store_true",
        help="enable the debug mode",
    )
    parser.add_argument(
        "--all",
        action="store_true",
        help="run all solvers",
    )
    for c in Solver.get_solver_classes():
        name = c._name
        if c._env_var:
            extra_info = f" (environment variable: {c._env_var})"
        else:
            extra_info = ""
        parser.add_argument(
            f"--{name.lower()}",
            action="append_const",
            const=f"{name}",
            help=f"run {name} solver{extra_info}",
            dest="solvers",
        )

    opts = parser.parse_args(args=args)

    # Initialise colours in the terminal before other things.
    color = cast(str, opts.color)
    strip: Optional[bool] = None  # for "auto"
    if color == "always":
        strip = False
    elif color == "never":
        strip = True
    colorama.deinit()  # See: https://github.com/tartley/colorama/issues/205
    old_stderr = sys.stderr  # must be the original one
    colorama.init(strip=strip)
    if stderr_color_hook:
        stderr_color_hook(old_stderr == sys.stderr)

    problem_type = cast(ProblemTypeInput, opts.type)
    n_problems = cast(int, opts.nproblems)
    n_warmups = cast(int, opts.nwarmups)
    exp_dist = cast(ExponentsDistribution, opts.exp_dist)
    n_vars = cast(int, opts.nvars)
    max_n_terms = cast(int, opts.max_nterms)
    max_degree = cast(int, opts.max_degree)
    max_coeff = cast(int, opts.max_coeff)
    seed = cast(int, opts.seed)
    timeout = cast(int, opts.timeout)
    build_only = cast(bool, opts.build_only)
    keep_temp = cast(bool, opts.keep_temp)
    debug = cast(bool, opts.debug)

    if opts.min_nterms is not None:
        min_n_terms = cast(int, opts.min_nterms)
    else:
        min_n_terms = max(int(max_n_terms * 0.75), 1)

    if opts.min_degree is not None:
        min_degree = cast(int, opts.min_degree)
    else:
        if exp_dist == "uniform":
            min_degree = max(int(max_degree * 0.75), 0)
        else:
            min_degree = 0

    if opts.min_coeff is not None:
        min_coeff = cast(int, opts.min_coeff)
    else:
        min_coeff = -max_coeff

    if opts.build_directory is not None:
        build_dir = Path(opts.build_directory)
    else:
        build_dir = Path(".") / "build"

    build_dir = build_dir.resolve()

    if opts.output_directory is not None:
        output_dir = Path(opts.output_directory)
    else:
        output_dir = Path(".") / "output"

    output_dir = output_dir.resolve()

    if not opts.solvers and not opts.all:
        raise ValueError(
            "no solvers specified. You need to specify at least one solver to be run"
        )

    # Create problems.

    problems = ProblemSet(
        problem_type=problem_type,
        n_warmups=n_warmups,
        n_problems=n_problems,
        seed=seed,
        exp_dist=exp_dist,
        n_vars=n_vars,
        min_n_terms=min_n_terms,
        max_n_terms=max_n_terms,
        min_degree=min_degree,
        max_degree=max_degree,
        min_coeff=min_coeff,
        max_coeff=max_coeff,
    )

    # Set up the logger.

    job_id = next_job_id(output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)
    log_file = output_dir / f"{job_id}.log"

    logger = logging.getLogger(__name__).getChild("Bench")

    if debug:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    def name_filter(record: logging.LogRecord) -> bool:
        record.name = record.name.split(".")[-1]
        return True

    stream_handler = colorlog.StreamHandler()
    stream_handler.addFilter(name_filter)
    stream_handler.setFormatter(
        colorlog.ColoredFormatter(
            "{log_color}[{levelname:^8}] {name:12}{reset}{message_log_color}{message}",
            style="{",
            log_colors={
                "DEBUG": "cyan",
                "INFO": "green",
                "WARNING": "yellow",
                "ERROR": "red",
                "CRITICAL": "red,bg_white",
            },
            secondary_log_colors={
                "message": {"WARNING": "yellow", "ERROR": "red", "CRITICAL": "red"}
            },
        )
    )
    logger.addHandler(stream_handler)

    logger.info(f"log_file = {log_file}")  # Before the log file is opened.

    log_file_handler = logging.FileHandler(log_file)
    log_file_handler.addFilter(name_filter)
    log_file_handler.setFormatter(
        logging.Formatter(
            "{asctime} [{levelname:^8}] {name:12}{message}",
            style="{",
            datefmt="%Y-%m-%d %H:%M:%S",
        )
    )
    logger.addHandler(log_file_handler)

    # Create solvers.

    solvers = Solver.create_solvers(
        job_id=job_id,
        build_dir=build_dir,
        output_dir=output_dir,
        logger=logger,
        timeout=timeout,
    )

    if opts.solvers:
        unknown_solvers = [
            s for s in opts.solvers if all(s.lower() != t.name.lower() for t in solvers)
        ]

        if unknown_solvers:
            raise ValueError(f"unknown solvers specified: {unknown_solvers}")

    if not opts.all:
        solvers = [
            s for s in solvers if any(s.name.lower() == t.lower() for t in opts.solvers)
        ]

    # Do benchmarks.

    config_log(
        logger,
        problem_type=problem_type,
        n_warmups=n_warmups,
        n_problems=n_problems,
        exp_dist=exp_dist,
        n_vars=n_vars,
        min_n_terms=min_n_terms,
        max_n_terms=max_n_terms,
        min_degree=min_degree,
        max_degree=max_degree,
        min_coeff=min_coeff,
        max_coeff=max_coeff,
        build_dir=build_dir,
        output_dir=output_dir,
        job_id=job_id,
        seed=seed,
        timeout=timeout,
        build_only=build_only,
        keep_temp=keep_temp,
        debug=debug,
    )

    solvers = prepare_solvers(solvers, problems)

    if solvers and not build_only:
        run_solvers(
            solvers,
            problems,
            job_id=job_id,
            output_dir=output_dir,
            logger=logger,
            keep_temp=keep_temp,
        )
示例#24
0
def main(argv):
    command = CommandManager()

    # create the top-level parser
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()
    subparsers.required = True
    subparsers.dest = 'command'

    # create the parser for the "prepare" command
    subparser = subparsers.add_parser('prepare',
                                      help="prepare source directory")
    subparser.set_defaults(func=command.prepare)
    subparser.add_argument('--fetch', action='store_true',
                           help='force to fetch all repositories')
    subparser.add_argument('--update-feeds', action='store_true',
                           help='force to update all feeds')

    # create the parser for the "clean" command
    subparser = subparsers.add_parser('clean',
                                      help="clean source directory")
    subparser.set_defaults(func=command.clean)
    subparser.add_argument('--purge', action='store_true',
                           help='reset all repositories to its initial state')

    # create the parser for the "config" command
    subparser = subparsers.add_parser('config',
                                      help="change default configuration of LEDE project")
    subparser.set_defaults(func=command.config)
    subparser.add_argument('--kernel', action='store_true',
                           help='configure Linux kernel')

    # create the parser for the "build" command
    subparser = subparsers.add_parser('build',
                                      help="build image for current configuration")
    subparser.set_defaults(func=command.build)
    subparser.add_argument('-j', '--jobs', type=int,
                           help='specifies the number of jobs to run simultaneously')
    subparser.add_argument('-v', '--verbose', action='store_true',
                           help='show all commands during build process')
    subparser.add_argument('-k', '--key',
                           help='specify path to build key in a format <secret>[:<public>]; '
                                'when the <public> key is omitted then <secret>.pub is used')
    subparser.add_argument('target', nargs='*',
                           help='build only specific targets when specified')

    # create the parser for the "build-version" command
    subparser = subparsers.add_parser('build-version',
                                      help="get version for current build")
    subparser.set_defaults(func=command.build_version)
    subparser.add_argument('-s', '--short', action='store_true',
                           help='short version without commit suffix')

    # create the parser for the "deploy" command
    subparser = subparsers.add_parser('deploy',
                                      help="deploy selected image to target device")
    subparser.set_defaults(func=command.deploy)
    subparser.add_argument('--mac', nargs='?',
                           help='MAC address of bOS device (it is also used for remote host name determination)')
    subparser.add_argument('--hostname', nargs='?',
                           help='ip address or hostname of remote bOS device with ssh server')
    subparser.add_argument('--pool-url', nargs='?',
                           help='address of pool server in a format <host>[:<port>]')
    subparser.add_argument('--pool-user', nargs='?',
                           help='name of pool worker')
    subparser.add_argument('--uenv', choices=['mac', 'factory_reset', 'sd_images', 'sd_boot'], nargs='*',
                           help='enable some options in uEnv.txt for SD images')
    subparser.add_argument('--feeds-base', nargs='?',
                           help='URL to the Packages file for concatenation with new feeds index '
                                '(for local_feeds target only)')
    subparser.add_argument('target', nargs='*',
                           help='list of targets for deployment (local target can specify also output directory '
                                'in a format <target>[:<path>])')

    # create the parser for the "status" command
    subparser = subparsers.add_parser('status',
                                      help="show status of LEDE repository and all dependent projects")
    subparser.set_defaults(func=command.status)

    # create the parser for the "debug" command
    subparser = subparsers.add_parser('debug',
                                      help="debug application on remote target")
    subparser.set_defaults(func=command.debug)

    # create the parser for the "toolchain" command
    subparser = subparsers.add_parser('toolchain',
                                      help="set environment for LEDE toolchain")
    subparser.set_defaults(func=command.toolchain)

    # create the parser for the "release" command
    subparser = subparsers.add_parser('release',
                                      help="create branch with configuration for release version")
    subparser.set_defaults(func=command.release)
    subparser.add_argument('--no-fetch', action='store_true',
                           help='do not force fetching all repositories before creating release configuration')
    subparser.add_argument('--no-push', action='store_true',
                           help='do not push changes to upstream')

    # create the parser for the "key" command
    subparser = subparsers.add_parser('key',
                                      help="generate build key pair for signing firmware tarball and packages")
    subparser.set_defaults(func=command.key)
    subparser.add_argument('secret',
                           help='path to secret key output')
    subparser.add_argument('public', nargs='?',
                           help='path to public key output; when omitted then <secret>.pub is used')

    # add global arguments
    parser.add_argument('--log', choices=['error', 'warn', 'info', 'debug'], default='info',
                        help='logging level')
    parser.add_argument('--config', default=bos_builder.DEFAULT_CONFIG,
                        help='path to configuration file')
    parser.add_argument('--platform', choices=['zynq-dm1-g9', 'zynq-dm1-g19', 'zynq-dm1-g29', 'zynq-am1-s9'], nargs='?',
                        help='change default bOS platform')

    # parse command line arguments
    args = parser.parse_args(argv)

    # create color handler
    handler = colorlog.StreamHandler()
    handler.setFormatter(colorlog.ColoredFormatter(log_colors={
        'DEBUG':    'cyan',
        'INFO':     'green',
        'WARNING':  'yellow',
        'ERROR':    'red',
        'CRITICAL': 'red,bg_white',
    }))

    # set logging level
    logging.basicConfig(level=getattr(logging, args.log.upper()), handlers=[handler])

    # set arguments
    command.set_args(argv, args)

    # call sub-command
    args.func()
示例#25
0
def main():
    """
    Parse command arguments, initialize log and start the job

    :return:        0 in case of success, between 1 and 127 in case of failure
    :rtype:         int
    """

    # Initialise and parse command arguments
    parser = argparse.ArgumentParser(description="Run a specific job")
    parser.add_argument('--fork',
                        "-f",
                        action="store_true",
                        help="Run double fork to daemonize the process")
    parser.add_argument('--redis-host', '-H', help="Redis-server host")
    parser.add_argument('--redis-port',
                        '-P',
                        help='Redis-server connection port')
    parser.add_argument('--redis-data-db',
                        '-i',
                        help="Redis-server database index for data")
    parser.add_argument('--redis-pubsub-db',
                        '-j',
                        help="Redis-server database index for events")
    parser.add_argument('--log-level', '-l', help="log level (ex: info)")
    parser.add_argument(
        '--log-output',
        help="log output: a file path, 'syslog', 'stderr' or 'stdout'")
    parser.add_argument('job_id', type=int, help="The id of the job to run")
    parser.add_argument('toolchain', help="The toolchain to launch")

    args = parser.parse_args()

    # Load config
    conf = core.api_util.get_conf()
    api_name = conf.get('general', 'api_name')
    server_name = conf.get('general', "server")

    # Initialise logging
    if args.log_level:
        log_level = args.log_level.strip().upper()
    elif conf.has_section("log") and conf.has_option("log", "server_level"):
        log_level = conf.get("log", "server_level").strip().upper()
    else:
        log_level = "WARNING"
    log_level_int = logging.getLevelName(log_level)
    if not type_util.is_int(log_level_int):
        sys.stderr.write("Error: Invalid logging level " + repr(log_level) +
                         "\n")
        sys.stderr.flush()
        return 1
    logging.getLogger().setLevel(
        logging.INFO if log_level_int < logging.INFO else log_level_int)
    log.setLevel(log_level_int)

    if args.log_output:
        log_output = args.log_output.strip().lower()
    elif conf.has_section("log") and conf.has_option("log", "server_output"):
        log_output = conf.get("log", "server_output").strip().lower()
    else:
        log_output = "stderr"
    if log_output in ("stderr", "stdout"):
        log_file = sys.stderr if log_output == "stderr" else sys.stdout
        if log_file.isatty():
            use_color = not util.env_is_off("LOG_COLOR")
        else:
            use_color = util.env_is_on("LOG_COLOR")
        if use_color:
            log_format = "%(log_color)s%(levelname)-8s%(blue)s%(name)-16s%(reset)s %(white)s%(message)s"
            log_handler = colorlog.StreamHandler(stream=log_file)
            log_handler.setFormatter(colorlog.ColoredFormatter(log_format))
        else:
            log_handler = logging.StreamHandler(stream=log_file)
            log_handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
    elif log_output == "syslog":
        log_handler = logging.handlers.SysLogHandler(address='/dev/log')
        log_handler.setFormatter(
            logging.Formatter(
                '%(levelname)s %(module)s P%(process)d T%(thread)d %(message)s'
            ))
    else:
        log_handler = logging.FileHandler(log_output)
        log_handler.setFormatter(
            logging.Formatter(
                '%(asctime)s: %(levelname)-7s: %(name)s - %(message)s'))
    logging.getLogger().addHandler(log_handler)
    log.addHandler(log_handler)
    log.propagate = False

    # Get Redis config
    if args.redis_host:
        redis_host = args.redis_host.strip()
    elif conf.has_section("redis") and conf.has_option("redis", "host"):
        redis_host = conf.get("redis", "host").strip()
    else:
        redis_host = "localhost"
    if args.redis_port:
        redis_port = int(args.redis_port.strip())
    elif conf.has_section("redis") and conf.has_option("redis", "port"):
        redis_port = int(conf.get("redis", "port").strip())
    else:
        redis_port = 6379
    if args.redis_data_db:
        data_db = int(args.redis_data_db.strip())
    elif conf.has_section("redis") and conf.has_option("redis", "data_db"):
        data_db = int(conf.get("redis", "data_db").strip())
    else:
        data_db = 0
    if args.redis_pubsub_db:
        pubsub_db = int(args.redis_pubsub_db.strip())
    elif conf.has_section("redis") and conf.has_option("redis", "pubsub_db"):
        pubsub_db = int(conf.get("redis", "pubsub_db").strip())
    else:
        pubsub_db = 1

    # Launch the main function
    try:
        if init_process(args.fork, api_name, int(args.job_id), args.toolchain):
            init_data_sources(api_name, server_name, redis_host, redis_port,
                              data_db, pubsub_db)
            run_toolchain(api_name, server_name, int(args.job_id),
                          args.toolchain)
    except KeyboardInterrupt:
        logging.getLogger("aziugo").info("Signal received, exiting")
        return 0
    except (StandardError, subprocess.CalledProcessError) as e:
        logging.getLogger("aziugo").exception(str(e))
        return 2
    return 0
示例#26
0
def main():
    # We prepend a hash to each log line so that the output of this utility
    # can be more easily composed with other scripts, since our logging will
    # look like comments.
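    # For example, logger.info("Gathering global configuration...") below is
    # rendered as:
    #   # INFO     Gathering global configuration...
    # so downstream shell scripts can safely treat these lines as comments.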
    if sys.stdout.isatty():
        log_format = "# %(log_color)s%(levelname)-8s%(reset)s %(message)s"
        formatter = colorlog.ColoredFormatter(log_format,
                                              datefmt=None,
                                              reset=True,
                                              log_colors={
                                                  'DEBUG': 'cyan',
                                                  'INFO': 'green',
                                                  'WARNING': 'yellow',
                                                  'ERROR': 'red',
                                                  'CRITICAL': 'red,bg_white',
                                              },
                                              secondary_log_colors={},
                                              style='%')
        handler = colorlog.StreamHandler(sys.stdout)
    else:
        log_format = "# %(levelname)-8s %(message)s"
        formatter = logging.Formatter(log_format, datefmt=None)
        handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)

    parser = argparse.ArgumentParser()
    parser.add_argument(dest='command',
                        default=['template'],
                        nargs='*',
                        help='the command to run. Valid commands are %s' %
                        str(valid_commands))
    parser.add_argument('--yes', '-y', action='store_true', default=False)
    parser.add_argument('--verbose', '-v', action='store_true', default=False)
    parser.add_argument('--dry-run',
                        dest='dry_run',
                        action='store_true',
                        default=False)
    parser.add_argument('--explain',
                        dest='explain',
                        action='store_true',
                        default=False)
    parser.add_argument(
        '--chart',
        dest='chart',
        help=
        'apply the action to only this chart. may be a directory, tgz, or a name from the config file'
    )
    parser.add_argument('--helm-registry-url',
                        dest='helm_registry_url',
                        help='the full helm registry URL to use for charts')
    parser.add_argument('--config-file',
                        '-f',
                        action='append',
                        dest='config_files',
                        help='the config files.',
                        default=['ankh.yaml'])
    parser.add_argument('--kube-context',
                        dest='kube_context',
                        help='the kube context to use.')
    parser.add_argument('--release',
                        dest='release',
                        help='the release to target.')
    parser.add_argument('--kubeconfig',
                        dest='kubeconfig',
                        help='the kube config to use.')
    parser.add_argument('--ankhconfig',
                        dest='ankhconfig',
                        help='the ankh config to use.',
                        default=os.environ.get(
                            "ANKHCONFIG",
                            os.environ.get("HOME", "") + "/.ankh/config"))
    args = parser.parse_args()

    # context commands require no global_config nor any of the optional command-line args.
    try:
        command = args.command[0]
        if command == 'config':
            if len(args.command) < 2:
                logger.error(
                    "need at least 2 arguments for the `config` subcommand")
                sys.exit(1)
            subcommand = args.command[1]
            if subcommand == 'view':
                global_config = gather_config(args, log=True)
                yaml.dump(global_config, sys.stdout, default_flow_style=False)
                return 0
            if subcommand == 'current-context':
                return current_context_command(args)
            if subcommand == 'get-contexts':
                return get_contexts_command(args)
            if subcommand == 'use-context':
                if len(args.command) != 3:
                    logger.error("use-context subcommand requires an argument")
                    sys.exit(1)
                ctx_arg = args.command[2]
                return use_context_command(args, ctx_arg)
    except KeyboardInterrupt:
        logger.info("Interrupted")
        sys.exit(1)

    # We need the first pass of config to determine dependencies.
    logger.info("Gathering global configuration...")
    global_config = gather_config(args, log=True)

    dependencies = []
    if 'admin_dependencies' in global_config:
        if global_config.get('cluster_admin', False):
            logger.debug(
                "Current context has cluster_admin: adding admin_dependencies")
            dependencies.extend(global_config['admin_dependencies'])
        else:
            logger.debug(
                "Current context does not have cluster_admin: skipping admin_dependencies"
            )
    if 'dependencies' in global_config:
        dependencies.extend(global_config['dependencies'])

    if len(dependencies) == 0:
        # common-case: only run in this directory
        return run('.', args)

    # Satisfy each dependency by changing into that directory and running.
    # Recursive dependencies are prevented in run().
    logger.debug("Found dependencies: %s" % str(dependencies))
    for dep in dependencies:
        logger.info("Satisfying dependency: %s" % dep)
        old_dir = os.getcwd()
        os.chdir(dep)
        r = run(dep, args)
        os.chdir(old_dir)
        if r != 0:
            return r
    return 0
示例#27
0
def main():
    parser = argparse.ArgumentParser(
        description=
        'This script makes sure all the duplicated cars have their corresponding entries in config.xml. It compares: 1. gb and ie (Audi, Seat and Volkswagen), 2. gb and ae (Lexus)',
        usage='compare-V10Countries.py (Run from any directory)')
    args = parser.parse_args()

    handler = colorlog.StreamHandler()
    handler.setFormatter(
        colorlog.ColoredFormatter('%(log_color)s%(levelname)s:%(message)s'))
    logger = colorlog.getLogger()
    logger.addHandler(handler)
    logger.setLevel(level=logging.INFO)

    logger.info("Started")

    if os.name == 'nt':
        tree = ET.parse('E:\\virtual_tours\\gforces\\cars\\.src\\config.xml')
    else:
        tree = ET.parse('/media/e/virtual_tours/gforces/cars/.src/config.xml')

    root = tree.getroot()

    # Compare ie and gb

    # Make a list containing all the Audi, Seat and Volkswagen models in gb
    gbList = []
    for car in tree.findall('country/brand/model/car'):
        carID = car.get('id')
        if carID.startswith('gb_audi') or carID.startswith(
                'gb_seat') or carID.startswith('gb_volkswagen'):
            # Remove the first 3 characters = country code
            gbList.append(carID[3:])

    # Make a list containing all the Audi, Seat and Volkswagen models in ie
    ieList = []
    for car in tree.findall('country/brand/model/car'):
        carID = car.get('id')
        if carID.startswith(('ie_audi', 'ie_seat', 'ie_volkswagen')):
            # Remove the first 3 characters (the country code)
            ieList.append(carID[3:])

    comp1 = sorted(set(gbList).difference(ieList))
    if comp1:
        print('  ====================')
        print('  Ireland missing cars')
        print('  ====================')
        for ieCar in comp1:
            ieCar = '  ie_' + ieCar
            print(ieCar)

    comp2 = sorted(set(ieList).difference(gbList))
    if comp2:
        print('  ================')
        print('  GB missing cars:')
        print('  ================')
        for gbCar in comp2:
            gbCar = '  gb_' + gbCar
            print(gbCar)

    # Compare ae and gb

    # Make a list containing all the cars in <ignore>
    ignoreList = []
    for ignoredCar in tree.findall('ignore/car'):
        carID = ignoredCar.get('id')
        ignoreList.append(carID[3:])

    # Make a list containing all the Lexus models in ae
    aeList = []
    for car in tree.findall('country/brand/model/car'):
        carID = car.get('id')
        if carID.startswith('ae_lexus'):
            # Remove the first 3 characters (the country code)
            aeList.append(carID[3:])

    # Make a list containing all the Lexus models in gb
    gbList = []
    for car in tree.findall('country/brand/model/car'):
        carID = car.get('id')
        if carID.startswith('gb_lexus'):
            # Remove the first 3 characters (the country code)
            gbList.append(carID[3:])

    # Note: set.difference() returns a new set, whereas difference_update()
    # mutates in place and returns None, so it cannot be chained here.
    comp3 = sorted(set(gbList).difference(aeList).difference(ignoreList))
    if comp3:
        print('  ==================')
        print('  Dubai missing cars')
        print('  ==================')
        for aeCar in comp3:
            print('  ae_' + aeCar)

    comp4 = sorted(set(aeList).difference(gbList).difference(ignoreList))
    if comp4:
        print('  ===============')
        print('  GB missing cars')
        print('  ===============')
        for gbCar in comp4:
            print('  gb_' + gbCar)

    if not comp1 and not comp2 and not comp3 and not comp4:
        print('  All OK!!!')
        logger.info('EOF')
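# The four comparisons above share one pattern (a reference set minus the set
# being checked, minus an optional ignore list). A small helper like the
# hypothetical one below could express it once; this is a sketch, not part of
# the original script:
def report_missing(title, prefix, have, reference, ignore=()):
    """Print IDs present in `reference` but absent from `have`, minus `ignore`."""
    missing = sorted(set(reference).difference(have).difference(ignore))
    if missing:
        bar = '  ' + '=' * len(title)
        print(bar)
        print('  ' + title)
        print(bar)
        for car in missing:
            print('  ' + prefix + car)
    return missing

# Example usage (equivalent to the first comparison above):
#   report_missing('Ireland missing cars', 'ie_', ieList, gbList)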
Example #28
def main():
    # Script only for python3
    if sys.version_info[0] < 3:
        sys.exit('Must be using Python 3')

    parser = argparse.ArgumentParser(
        description='Creates tiles for cars with the visualiser option.')
    parser.add_argument('-i',
                        action='store_false',
                        dest='ignoreunderscores',
                        default=True,
                        help='Skip the check for 3 underscores in filenames')
    args = parser.parse_args()

    handler = colorlog.StreamHandler()
    handler.setFormatter(
        colorlog.ColoredFormatter('%(log_color)s%(levelname)s:%(message)s',
                                  log_colors={
                                      'DEBUG': 'green',
                                      'INFO': 'cyan'
                                  }))
    logger = colorlog.getLogger()
    logger.addHandler(handler)
    logger.setLevel(level=logging.INFO)

    logger.info("Started")

    if query_yes_no('Do you need WebVR support?'):
        webvr = "yes"
    else:
        webvr = "no"

    if query_yes_no('Do you want 1024px preview?'):
        preview1024 = "yes"
    else:
        preview1024 = "no"

    parentdir = os.path.basename(os.path.abspath('..'))
    if parentdir == 'gforces':
        panosdir = os.path.join(os.path.expanduser('~'), 'virtual-tours',
                                'gforces', 'cars', '.src', 'panos')
    else:
        panosdir = os.path.join(os.getcwd(), '.src', 'panos')

    delete_kmem_folder()

    # Delete any residual files or folders
    for root, _, _ in os.walk(panosdir):
        for filepath in glob.glob(os.path.join(root, "*.kro")):
            if os.path.isfile(filepath):
                os.remove(filepath)
        for dirpath in glob.glob(os.path.join(root, "output")):
            if os.path.isdir(dirpath):
                shutil.rmtree(dirpath)

    # Check if the directory containing the panos is empty
    if not os.listdir(panosdir):
        sys.exit('ERROR: Panos directory is empty')

    # Build list 'allitems' with ALL the items inside .src/panos and all subdirectories, excluding 'output'
    # Build list 'allitemsname' to check that each name has only 3 underscores
    allitems = []
    allitemsname = []
    tours = sorted(glob.glob(panosdir + '/*'))
    for item in tours:
        if os.path.isfile(item):
            allitems.append(item)
            allitemsname.append(item)
        if os.path.isdir(item):
            allitemsname.append(item)
            subtours = sorted(glob.glob(item + "/*"))
            for subitem in subtours:
                allitems.append(subitem)
        if 'output' in item:
            # list.remove() needs the exact value that was appended (the full
            # path), not the bare string 'output'.
            if item in allitems:
                allitems.remove(item)
            if item in allitemsname:
                allitemsname.remove(item)

    # Check there aren't duplicated items
    duplicates = []
    for i in tours:
        iname = os.path.basename(i)
        ibasename = os.path.splitext(iname)[0]
        duplicates.append(ibasename)

    repeatedcar = [
        item for item, count in collections.Counter(duplicates).items()
        if count > 1
    ]
    if repeatedcar:
        sys.exit('ERROR: The following cars are repeated: ' + str(repeatedcar))

    # Check panorama names have 3 underscores. The check can be skipped with the '-i' switch.
    for tourname in allitemsname:
        if args.ignoreunderscores:
            tourbasename = os.path.basename(tourname)
            splitbasename = os.path.splitext(tourbasename)[0]
            underscores = splitbasename.count('_')
            if underscores != 3:
                sys.exit('ERROR: File ' + tourbasename + ' contains ' + str(underscores)
                         + ' underscores instead of 3. Please rename it.')

    # Check if tiles are needed
    for car in allitems:
        # logger.info('car: ' + car)
        carbasename = os.path.basename(os.path.dirname(car))
        tourbasename = os.path.splitext(os.path.basename(car))[0]
        filesdir = os.path.join(tourbasename, 'files')
        scenesdir = os.path.join(tourbasename, 'files', 'scenes')
        parentdir = os.path.basename(os.path.abspath('..'))
        krdir = os.path.join('/', 'Users', 'rafael', 'Documents', 'software',
                             'virtual-tours', 'krpano')
        krpath = os.path.join(krdir, 'bin', 'krpanotools')
        krtemplates = os.path.join(krdir, 'krpano_conf', 'templates')
        if webvr == "yes":
            if preview1024 == "yes":
                krconfig = '-config=' + krtemplates + '/tiles_for_vr_preview_1024.config'
            else:
                krconfig = '-config=' + krtemplates + '/tiles_for_vr.config'
        else:
            if preview1024 == "yes":
                krconfig = '-config=' + krtemplates + '/tiles_for_mobile_preview_1024.config'
            else:
                krconfig = '-config=' + krtemplates + '/tiles_for_mobile.config'

        krcall = [krpath, "makepano", krconfig, car]

        if (parentdir == 'gforces') or ("manufacturer" in parentdir):
            if 'scene' in tourbasename:
                case = 'GForces - Visualiser'
                carbasename = os.path.basename(os.path.dirname(car))
                filesdir = os.path.join(carbasename, 'files')
                scenesdir = os.path.join(carbasename, 'files', 'scenes')
                tilesdir = os.path.join(scenesdir, tourbasename)
                outputdir = os.path.join(panosdir, carbasename, 'output')
                outputtilesdir = os.path.join(outputdir, 'scenes',
                                              tourbasename)
                outputxmlfile = os.path.join(outputdir, tourbasename) + '.xml'
                replaceorigin = 'scenes/' + tourbasename
                replacedest = '%ROOT%/../' + carbasename + '/files/scenes/' + tourbasename
                xmlfile = os.path.join(carbasename, 'files', 'scenes',
                                       tourbasename) + '.xml'
                xmlfilebck = os.path.join(carbasename, 'files', 'scenes',
                                          tourbasename) + '_bck.xml'
                message = carbasename + '/' + tourbasename
            else:
                case = 'GForces - Other'
                carbasename = tourbasename
                filesdir = os.path.join(carbasename, 'files')
                scenesdir = os.path.join(carbasename, 'files', 'scenes')
                tilesdir = os.path.join(scenesdir, 'tiles')
                outputdir = os.path.join(panosdir, 'output')
                outputtilesdir = os.path.join(outputdir, 'scenes',
                                              tourbasename)
                outputxmlfile = os.path.join(outputdir, tourbasename) + '.xml'
                replaceorigin = 'scenes/' + tourbasename
                replacedest = '%ROOT%/../' + tourbasename + '/files/scenes/tiles'
                message = tourbasename + '/' + carbasename
                xmlfile = os.path.join(tourbasename, 'files', 'scenes',
                                       'scene') + '.xml'
                xmlfilebck = os.path.join(tourbasename, 'files', 'scenes',
                                          'scene') + '_bck.xml'
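# `query_yes_no()` and `delete_kmem_folder()` used above are defined elsewhere
# in this script collection and are not shown in this snippet. A minimal sketch
# of what `query_yes_no()` might look like, assuming the usual prompt-and-retry
# recipe (defaults and wording are assumptions, not the original helper):
def query_yes_no(question, default='yes'):
    valid = {'yes': True, 'y': True, 'no': False, 'n': False}
    prompt = ' [Y/n] ' if default == 'yes' else ' [y/N] '
    while True:
        choice = input(question + prompt).strip().lower()
        if not choice and default is not None:
            return valid[default]
        if choice in valid:
            return valid[choice]
        print("Please answer 'yes' or 'no' (or 'y'/'n').")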
Example #29
import logging
import os
import sys
from io import StringIO

import colorlog


streams = {
    "stdout": sys.stdout,
    "stderr": sys.stderr
}

stream = os.getenv("MAGMA_LOG_STREAM", "stderr")
if stream not in streams:
    logging.warning(f"Unsupported value for MAGMA_LOG_STREAM: {stream} "
                    "using stderr instead")
log_stream = streams.get(stream, sys.stderr)
log = logging.getLogger("magma")
handler = colorlog.StreamHandler(log_stream)
handler.setFormatter(colorlog.ColoredFormatter(
    '%(name)s:%(log_color)s%(levelname)s%(reset)s:%(message)s'))
log.addHandler(handler)


level = os.getenv("MAGMA_LOG_LEVEL", "INFO")
if level in ["DEBUG", "WARN", "INFO"]:
    log.setLevel(getattr(logging, level))
elif level is not None:
    logging.warning(f"Unsupported value for MAGMA_LOG_LEVEL: {level}")


__magma_include_wire_traceback = os.getenv("MAGMA_INCLUDE_WIRE_TRACEBACK", False)
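# A minimal usage sketch (not part of the original module): any code that
# imports this module gets the colored "magma" logger configured above, with
# MAGMA_LOG_STREAM and MAGMA_LOG_LEVEL controlling the stream and level.
if __name__ == "__main__":
    log.debug("only visible when MAGMA_LOG_LEVEL=DEBUG")
    log.info("colored INFO message on %s", stream)
    log.warning("WARNING and above always pass the default INFO level")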

Example #30
def main():
    def embeddata(dirname):
        logger.info('[   ] ' + dirname)
        filenames = sorted(os.listdir(dirname))

        for filename in filenames:
            if filename.lower().endswith(('.jpg', '.jpeg')):
                filepath = os.path.join(dirname, filename)
                filename_base, filename_ext = os.path.splitext(filename)
                logger.info('[ - ] ' + filename_base)

                correct_input = False
                while not correct_input:
                    try:
                        ath = int(input("INFO [ > ] H: "))
                        atv = int(input("INFO [ > ] V: "))
                    except ValueError:
                        print("Please enter integers only")
                    else:
                        correct_input = True

                posehead = '-PoseHeadingDegrees=' + str(ath)
                initialheading = '-InitialViewHeadingDegrees=' + str(ath)
                initialpitch = '-InitialViewPitchDegrees=' + str(atv)

                subprocess.run([
                    'exiftool', '-overwrite_original', '-Make=RICOH',
                    '-Model=RICOH THETA S', '-ProjectionType=equirectangular',
                    '-UsePanoramaViewer=True',
                    '-CroppedAreaImageWidthPixels=6000',
                    '-CroppedAreaImageHeightPixels=3000',
                    '-FullPanoWidthPixels=6000', '-FullPanoHeightPixels=3000',
                    '-CroppedAreaLeftPixels=0', '-CroppedAreaTopPixels=0',
                    posehead, initialheading, initialpitch,
                    '-InitialViewRollDegrees=0',
                    '-InitialHorizontalFOVDegrees=75', filepath
                ])

    # Add description
    parser = argparse.ArgumentParser(
        description='Embeds metadata to turn a panorama into an interactive '
        'image on Facebook, Google+, etc.')
    parser.add_argument('-d',
                        '--directory',
                        action=readable_dir,
                        dest='dirname',
                        help='Search for files only in this directory')
    # parser.add_argument('-d', '--destination', dest='destination', help='Simple parameter')
    args = parser.parse_args()

    handler = colorlog.StreamHandler()
    handler.setFormatter(
        colorlog.ColoredFormatter('%(log_color)s%(levelname)s:%(message)s'))
    logger = colorlog.getLogger()
    logger.addHandler(handler)
    logger.setLevel(level=logging.NOTSET)

    logger.info("Started")

    dirname = args.dirname
    path = '.'

    if dirname:
        embeddata(dirname)
    else:
        dirs = [
            d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d))
        ]
        for dirname in dirs:
            embeddata(dirname)

    logger.info('EOL')
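# `readable_dir` (used above as `action=readable_dir`) is defined elsewhere in
# this script collection. A minimal sketch of such an argparse action, assuming
# it validates that the argument is an existing, readable directory (relies on
# the `argparse` and `os` modules already used by this script; names and
# messages are assumptions):
class readable_dir(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        prospective_dir = values
        if not os.path.isdir(prospective_dir):
            parser.error(f'{prospective_dir} is not a valid directory')
        if not os.access(prospective_dir, os.R_OK):
            parser.error(f'{prospective_dir} is not a readable directory')
        setattr(namespace, self.dest, prospective_dir)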