Code Example #1
File: search.py Project: rosenjcb/Sefaria-Project
            sheetrank_dict = json.load(fin)
    except IOError:
        sheetrank_dict = {}


init_pagesheetrank_dicts()
all_gemara_indexes = library.get_indexes_in_category("Bavli")
davidson_indexes = all_gemara_indexes[:all_gemara_indexes.index("Horayot") + 1]

es_client = Elasticsearch(SEARCH_ADMIN)
index_client = IndicesClient(es_client)

tracer = logging.getLogger('elasticsearch')
tracer.setLevel(logging.CRITICAL)
#tracer.addHandler(logging.FileHandler('/tmp/es_trace.log'))
tracer.addHandler(NullHandler())

doc_count = 0


def delete_text(oref, version, lang):
    try:
        not_merged_name = get_new_and_current_index_names('text')['current']
        merged_name = get_new_and_current_index_names('merged')['current']

        id = make_text_doc_id(oref.normal(), version, lang)
        es_client.delete(index=not_merged_name, doc_type='text', id=id)
        id = make_text_doc_id(oref.normal(), None, lang)
        es_client.delete(index=merged_name, doc_type='text', id=id)
    except Exception as e:
        logger.error(u"ERROR deleting {} / {} / {} : {}".format(
Code Example #2
File: __init__.py Project: KOLANICH/distlib
# Copyright (C) 2012-2019 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import logging

__version__ = '0.3.2.dev0'


class DistlibException(Exception):
    pass


try:
    from logging import NullHandler
except ImportError:  # pragma: no cover

    class NullHandler(logging.Handler):
        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            self.lock = None


logger = logging.getLogger(__name__)
logger.addHandler(NullHandler())
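The distlib snippet above is the canonical library pattern: fall back to a hand-written NullHandler on Python 2.6 and attach it to the package logger, so that merely importing the library never triggers "No handler found" warnings. Below is a minimal, illustrative sketch of the application side, showing how a consumer of such a library would attach a real handler to see its output; the logger name 'distlib' comes from the example, everything else is an assumption.

import logging

# Illustrative application-side setup: the library only attached a NullHandler,
# so nothing is printed until the application adds a handler of its own.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s'))

lib_logger = logging.getLogger('distlib')
lib_logger.addHandler(handler)
lib_logger.setLevel(logging.DEBUG)

lib_logger.debug('library messages are now visible')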
Code Example #3
    'HTTPSConnectionPool',
    'PoolManager',
    'ProxyManager',
    'HTTPResponse',
    'Retry',
    'Timeout',
    'add_stderr_logger',
    'connection_from_url',
    'disable_warnings',
    'encode_multipart_formdata',
    'get_host',
    'make_headers',
    'proxy_from_url',
)

logging.getLogger(__name__).addHandler(NullHandler())


def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
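add_stderr_logger() shown above is part of urllib3's public API (it is listed in __all__ earlier in the example). A hedged usage sketch; the request URL and log level are illustrative:

import logging
import urllib3

# Attach a stderr StreamHandler to the 'urllib3' logger for quick debugging;
# the helper returns the handler so it can be detached later.
handler = urllib3.add_stderr_logger(level=logging.DEBUG)

http = urllib3.PoolManager()
http.request('GET', 'https://example.com')

# Remove the handler once the extra verbosity is no longer needed.
logging.getLogger('urllib3').removeHandler(handler)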
Code Example #4
File: util.py Project: t3rmin4t0r/impyla
def get_logger_and_init_null(logger_name):
    logger = logging.getLogger(logger_name)
    logger.addHandler(NullHandler())
    return logger
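A one-line usage sketch for the helper above; passing __name__ is an assumption about how a caller would typically use it:

# Module-level logger that stays silent until the application configures logging.
log = get_logger_and_init_null(__name__)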
Code Example #5
# -*- coding: utf-8 -*-
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:

    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


logging.getLogger('pymailutils').addHandler(NullHandler())
Code Example #6
File: __init__.py Project: sarahb55/glue
try:
    from sip import setapi
except ImportError:
    pass
else:
    setapi('QString', 2)
    setapi('QVariant', 2)

import sys
from ._mpl_backend import MatplotlibBackendSetter
sys.meta_path.append(MatplotlibBackendSetter())

import logging
from logging import NullHandler

logging.getLogger('glue').addHandler(NullHandler())


def custom_viewer(name, **kwargs):
    """
    Create a custom interactive data viewer.

    To use this, first create a new variable by calling custom_viewer.
    Then, register one or more viewer functions using decorators.

    :param name: The name of the new viewer
    :type name: str

    Named arguments are used to build widgets and pass data
    to viewer functions. See ``specifying widgets`` below.
Code Example #7
def get_default_logger():
    """Get the default dkimpy logger."""
    logger = logging.getLogger('dkimpy')
    if not logger.handlers:
        logger.addHandler(NullHandler())
    return logger
Code Example #8
File: vmware.py Project: vharishgup/ansible-1
import time
import ConfigParser

from six import text_type, string_types

# Disable logging messages triggered by pSphere/suds.
try:
    from logging import NullHandler
except ImportError:
    from logging import Handler

    class NullHandler(Handler):
        def emit(self, record):
            pass

logging.getLogger('psphere').addHandler(NullHandler())
logging.getLogger('suds').addHandler(NullHandler())

from psphere.client import Client
from psphere.errors import ObjectNotFoundError
from psphere.managedobjects import HostSystem, VirtualMachine, ManagedObject, Network, ClusterComputeResource
from suds.sudsobject import Object as SudsObject


class VMwareInventory(object):

    def __init__(self, guests_only=None):
        self.config = ConfigParser.SafeConfigParser()
        if os.environ.get('VMWARE_INI', ''):
            config_files = [os.environ['VMWARE_INI']]
        else:
Code Example #9
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal.  If not, see <http://www.gnu.org/licenses/>.
from .api import list_subtitles, download_subtitles
from .async import Pool
from .core import (SERVICES, LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE,
                   MATCHING_CONFIDENCE)
from .infos import __version__
import logging
try:
    from logging import NullHandler
except ImportError:

    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


__all__ = [
    'SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE',
    'MATCHING_CONFIDENCE', 'list_subtitles', 'download_subtitles', 'Pool'
]
logging.getLogger("subliminal").addHandler(NullHandler())
Code Example #10
File: __init__.py Project: fehuapaya/scrapli
        self.last_log: Optional[Tuple[str, int, str]] = None

    def filter(self, record: logging.LogRecord) -> bool:
        """
        Filter duplicate entries in logs

        Fields to compare to previous log entry if these fields match; skip log entry

        Args:
            record: log record to check

        Returns:
            bool: filter or not

        Raises:
            N/A  # noqa

        """
        current_log = (record.module, record.levelno, record.msg)
        if current_log != getattr(self, "last_log", None):
            self.last_log = current_log
            return True
        return False


# Setup channel logger
TRANSPORT_LOG = logging.getLogger("channel")
# Add duplicate filter to channel log
TRANSPORT_LOG.addFilter(DuplicateFilter())
logging.getLogger("channel").addHandler(NullHandler())
Code Example #11
    from logging import NullHandler
except ImportError:

    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


import random
from requests.exceptions import ConnectionError
from requests import Session
import time

from bitbucket.exceptions import BitbucketError

logging.getLogger('bitbucket').addHandler(NullHandler())


def raise_on_error(r, verb='???', **kwargs):
    request = kwargs.get('request', None)

    if r is None:
        raise BitbucketError(None, **kwargs)

    if r.status_code >= 400:
        error = ''
        if r.status_code == 403 and "x-authentication-denied-reason" in r.headers:
            error = r.headers["x-authentication-denied-reason"]
        elif r.text:
            try:
                response = json.loads(r.text)
Code Example #12
def main():
    start = time.time()

    parser = make_parser()
    arguments = parser.parse_args()

    # Initialize timeout_override
    timeout_override = True if arguments.command == 'optimize' else False

    # Argparse nearly gets all conditions covered.
    # These remain because mutually exclusive arguments must be optional.
    if arguments.command == 'alias':
        if not arguments.alias_older_than and not arguments.unalias_older_than:
            print(
                '{0} delete: error: expect one of --alias-older-than or --unalias-older-than'
                .format(sys.argv[0]))
            sys.exit(1)

    if arguments.command == 'delete':
        if not arguments.older_than and not arguments.disk_space:
            print(
                '{0} delete: error: expect one of --older-than or --disk-space'
                .format(sys.argv[0]))
            sys.exit(1)

    if arguments.command == 'show':
        # Do not log and force dry-run if we opt to show indices or snapshots.
        arguments.log_file = os.devnull
        arguments.dry_run = True
        if not arguments.show_indices and not arguments.show_snapshots:
            print(
                '{0} show: error: expect one of --show-indices or --show-snapshots'
                .format(sys.argv[0]))
            sys.exit(1)
        if arguments.show_snapshots and not arguments.repository:
            print(
                '{0} show: error: --repository required with --show-snapshots'.
                format(sys.argv[0]))
            sys.exit(1)

    if arguments.command == 'snapshot':
        if not arguments.older_than and not arguments.most_recent and not arguments.delete_older_than and not arguments.all_indices:
            print(
                '{0} snapshot: error: expect one of --all-indices, --older-than, --most-recent, or --delete-older-than'
                .format(sys.argv[0]))
            sys.exit(1)
        if arguments.older_than or arguments.most_recent or arguments.all_indices:
            timeout_override = True

    # Setup logging
    if arguments.debug:
        numeric_log_level = logging.DEBUG
        format_string = '%(asctime)s %(levelname)-9s %(name)22s %(funcName)22s:%(lineno)-4d %(message)s'
    else:
        numeric_log_level = getattr(logging, arguments.log_level.upper(), None)
        format_string = '%(asctime)s %(levelname)-9s %(message)s'
        if not isinstance(numeric_log_level, int):
            raise ValueError('Invalid log level: %s' % arguments.log_level)

    date_string = None
    if arguments.logformat == 'logstash':
        os.environ['TZ'] = 'UTC'
        time.tzset()
        format_string = '{"@timestamp":"%(asctime)s.%(msecs)03dZ", "loglevel":"%(levelname)s", "name":"%(name)s", "function":"%(funcName)s", "linenum":"%(lineno)d", "message":"%(message)s"}'
        date_string = '%Y-%m-%dT%H:%M:%S'

    logging.basicConfig(level=numeric_log_level,
                        format=format_string,
                        datefmt=date_string,
                        stream=open(arguments.log_file, 'a')
                        if arguments.log_file else sys.stderr)

    # Filter out logging from Elasticsearch and associated modules by default
    if not arguments.debug:
        for handler in logging.root.handlers:
            handler.addFilter(
                Whitelist('root', '__main__', 'curator', 'curator.curator'))

    # Setting up NullHandler to handle nested elasticsearch.trace Logger instance in elasticsearch python client
    logging.getLogger('elasticsearch.trace').addHandler(NullHandler())

    logging.info("Job starting...")

    if arguments.dry_run:
        logging.info("DRY RUN MODE.  No changes will be made.")

    # Override the timestamp in case the end-user doesn't.
    if timeout_override and arguments.timeout == 30:
        logger.info(
            'Default timeout of 30 seconds is too low for command {0}.  Overriding to 21,600 seconds (6 hours).'
            .format(arguments.command.upper()))
        arguments.timeout = 21600

    client = elasticsearch.Elasticsearch(host=arguments.host,
                                         http_auth=arguments.auth,
                                         port=arguments.port,
                                         url_prefix=arguments.url_prefix,
                                         timeout=arguments.timeout,
                                         use_ssl=arguments.ssl)

    # Verify the version is acceptable.
    check_version(client)

    if arguments.master_only and not curator.is_master_node(client):
        logger.info(
            'Master-only flag detected. Connected to non-master node. Aborting.'
        )
        sys.exit(0)

    if arguments.command != "show":
        if arguments.timestring:
            validate_timestring(arguments.timestring, arguments.time_unit)
        else:  # Set default timestrings
            arguments.timestring = DATEMAP[arguments.time_unit]
            logging.debug("Setting default timestring for {0} to {1}".format(
                arguments.time_unit, arguments.timestring))
        logging.debug("Matching indices with pattern: {0}{1}".format(
            arguments.prefix, arguments.timestring))

    # Execute the command specified in the arguments
    argdict = arguments.__dict__
    logging.debug("argdict = {0}".format(argdict))
    arguments.func(client, **argdict)

    logger.info('Done in {0}.'.format(timedelta(seconds=time.time() - start)))
Code Example #13
try:
    import SocketServer
except ImportError:
    import socketserver as SocketServer

from logging import NullHandler
from paramiko import client, RSAKey, ssh_exception
from hpc_plugin.utilities import shlex_quote

# # @TODO `posixpath` can be used for common pathname manipulations on
# #       remote HPC systems
# import posixpath as cli_path

logging.getLogger("paramiko").setLevel(logging.WARNING)
# Hack to avoid the random "Error reading SSH protocol banner" issue
logging.getLogger('paramiko.transport').addHandler(NullHandler())


class SshClient(object):
    """Represents a ssh client"""
    _client = None

    def __init__(self, credentials):
        # Build a tunnel if necessary
        self._tunnel = None
        self._host = credentials['host']
        self._port = int(credentials['port']) if 'port' in credentials else 22
        if 'tunnel' in credentials:
            self._tunnel = SshForward(credentials)
            self._host = "localhost"
            self._port = self._tunnel.port()
Code Example #14
"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon

"""
__author__ = 'Gavin M. Roy <*****@*****.**>'
__since__ = "2009-09-10"
__version__ = "3.5.0"

from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException

import logging
try:
    from logging import NullHandler
except ImportError:

    class NullHandler(logging.Handler):
        """Python 2.6 does not have a NullHandler"""
        def emit(self, record):
            """Emit a record
            :param record record: The record to emit
            """
            pass


logging.getLogger('rejected').addHandler(NullHandler())
Code Example #15
File: ozwave.py Project: zegohome/python-openzwave
from openzwave.option import ZWaveOption
from louie import dispatcher, All
from pyozwweb.app import socketio, app

import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:

    class NullHandler(logging.Handler):
        """NullHandler logger for python 2.6"""
        def emit(self, record):
            pass


logging.getLogger('pyozwweb').addHandler(NullHandler())


@socketio.on('my echo event', namespace='/ozwave')
def echo_message(message):
    session['receive_count'] = session.get('receive_count', 0) + 1
    logging.debug("Client %s request echo message : %s", request.remote_addr,
                  message)
    emit('my response', {
        'data': message['data'],
        'count': session['receive_count']
    })


@socketio.on('disconnect request', namespace='/ozwave')
def disconnect_request():
Code Example #16
File: resources.py Project: priord/jira
import re
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):

        def emit(self, record):
            pass
import json

from six import iteritems, string_types, text_type

from .utils import threaded_requests, json_loads, CaseInsensitiveDict

logging.getLogger('jira').addHandler(NullHandler())


def get_error_list(r):
    error_list = []
    if r.status_code >= 400:
        if r.status_code == 403 and "x-authentication-denied-reason" in r.headers:
            error_list = [r.headers["x-authentication-denied-reason"]]
        elif r.text:
            try:
                response = json_loads(r)
                if 'message' in response:
                    # JIRA 5.1 errors
                    error_list = [response['message']]
                elif 'errorMessages' in response and len(response['errorMessages']) > 0:
                    # JIRA 5.0.x error messages sometimes come wrapped in this array
Code Example #17
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
###############################################################################

"""
The core module contains all core abstracts and classes.

All core abstracts and implementations for core concept classes (Study,
Environment, Parameter generation, etc.). This module also includes interface
abstracts, base class abstracts, and general utilities.
"""

import logging


try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        """Null logging handler for Python 3+."""

        def emit(self, record):
            """Override so that logging outputs nothing."""
            pass

LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(NullHandler())

__version_info__ = ("1", "1", "7")
__version__ = '.'.join(__version_info__)
Code Example #18
File: core.py Project: zniper/django-concurrency
from __future__ import absolute_import, unicode_literals

import logging

from concurrency.config import conf

# Set default logging handler to avoid "No handler found" warnings.
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger('concurrency').addHandler(NullHandler())

logger = logging.getLogger(__name__)

__all__ = []


def get_version_fieldname(obj):
    return obj._concurrencymeta.field.attname


def _set_version(obj, version):
    """
    Set the given version on the passed object

    This function should be used with 'raw' values, any type conversion should be managed in
    VersionField._set_version_value(). This is needed for future enhancement of concurrency.
Code Example #19
File: __init__.py Project: KenjiOhtsuka/PepperTweet
from oauth1_auth import OAuth1
from oauth1_session import OAuth1Session
#from .oauth2_auth import OAuth2
#from .oauth2_session import OAuth2Session, TokenUpdated

__version__ = '0.4.1'

import requests
if requests.__version__ < '2.0.0':
    msg = ('You are using requests version %s, which is older than '
           'requests-oauthlib expects, please upgrade to 2.0.0 or later.')
    raise Warning(msg % requests.__version__)

import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:

    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


logging.getLogger('requests_oauthlib').addHandler(NullHandler())
Code Example #20
File: xitherm.py Project: eblot/homeassist
# Require Paho-MQTT https://pypi.org/project/paho-mqtt/

from argparse import ArgumentParser, FileType
from configparser import ConfigParser
from json import dumps as jdumps
from logging import (DEBUG, ERROR, Formatter, Logger, NullHandler,
                     StreamHandler, getLogger)
from pprint import pprint
from socket import gethostname
from struct import unpack as sunpack, calcsize as scalc
from sys import exit as sysexit, modules, stderr
from time import sleep
from traceback import print_exc
from typing import Iterable, Optional, Set, Union
# workaround to prevent Bleson from setting up a logging basicConfig
Logger.root.addHandler(NullHandler())
from bleson import get_provider, logger, Observer
from paho.mqtt.client import Client, connack_string

TRUE_BOOLEANS = ['on', 'high', 'true', 'enable', 'enabled', 'yes', '1']
"""String values evaluated as true boolean values"""

FALSE_BOOLEANS = ['off', 'low', 'false', 'disable', 'disabled', 'no', '0']
"""String values evaluated as false boolean values"""


class MqttClient(Client):
    def __init__(self, *args, **kwargs):
        self._log = getLogger('xitherm.mqtt')
        super().__init__(*args, **kwargs)
Code Example #21
    oauthlib
    ~~~~~~~~

    A generic, spec-compliant, thorough implementation of the OAuth
    request-signing logic.

    :copyright: (c) 2019 by The OAuthlib Community
    :license: BSD, see LICENSE for details.
"""
import logging
from logging import NullHandler

__author__ = 'The OAuthlib Community'
__version__ = '3.2.0'

logging.getLogger('oauthlib').addHandler(NullHandler())

_DEBUG = False


def set_debug(debug_val):
    """Set value of debug flag
	
    :param debug_val: Value to set. Must be a bool value.
	"""
    global _DEBUG
    _DEBUG = debug_val


def get_debug():
    """Get debug mode value. 
Code Example #22
    def __init__(self, credentials):
        self.credentials = credentials

        # Logging setup.
        self.logger = logging.getLogger(__name__)
        self.logger.addHandler(NullHandler())
Code Example #23
import logging
import pkg_resources
import sys
from types import ModuleType
# allow high-level functions to be accessed directly from the mappyfile module
from mappyfile.utils import open, load, loads, find, findall, findunique, dumps, dump, save
from mappyfile.utils import findkey, update, validate

__version__ = "0.8.4"

__all__ = ['open', 'load', 'loads', 'find', 'findall', 'findunique', 'dumps', 'dump', 'save',
           'findkey', 'update', 'validate']


plugins = ModuleType('mappyfile.plugins')
sys.modules['mappyfile.plugins'] = plugins

for ep in pkg_resources.iter_entry_points(group='mappyfile.plugins'):
    setattr(plugins, ep.name, ep.load())

# Set default logging handler to avoid "No handler found" warnings.

try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger("mappyfile").addHandler(NullHandler())
Code Example #24
File: __init__.py Project: noisyboiler/nameko-wamp
import logging

try:  # Python 2.7+
    from logging import NullHandler
except ImportError:

    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


root = logging.getLogger(__name__)
root.addHandler(NullHandler())
Code Example #25
File: __init__.py Project: pinheirochagas/choochoo
from glob import glob
from logging import getLogger, NullHandler
from os.path import abspath, dirname, join
from sys import version_info

getLogger('bokeh').addHandler(NullHandler())
getLogger('tornado').addHandler(NullHandler())


class FatalException(Exception):

    '''
    Base class for exceptions that we can't ignore at some higher level
    (fundamental things like bad config).
    '''

    pass


from .commands.activities import activities
from .commands.args import COMMAND, parser, NamespaceWithVariables, PROGNAME, HELP, DEV, DIARY, FIT, \
    PACKAGE_FIT_PROFILE, ACTIVITIES, NO_OP, CONFIG, CONSTANTS, STATISTICS, TEST_SCHEDULE, MONITOR, GARMIN, \
    UNLOCK, DUMP, FIX_FIT, CH2_VERSION, JUPYTER, TUI
from .commands.constants import constants
from .commands.dump import dump
from .commands.config import config
from .commands.diary import diary
from .commands.fit import fit
from .commands.fix_fit import fix_fit
from .commands.garmin import garmin
from .commands.jupyter import jupyter
Code Example #26
def main(argv=sys.argv[1:]):
    if PY2:
        argv = [unicode(arg, sys.getfilesystemencoding())
                for arg in argv]  # noqa

    # Insert positional argument separator, if not already present
    if "--" not in argv:
        for i, argument in enumerate(argv):
            if not argument.startswith("-"):
                argv.insert(i, "--")
                break

    arg_parser = ArgumentParser(
        prog="maybe",
        usage="%(prog)s [options] command [argument ...]",
        description=
        "Run a command without the ability to make changes to your system " +
        "and list the changes it would have made.",
        epilog="For more information, to report issues or to contribute, " +
        "visit https://github.com/p-e-w/maybe.",
    )
    arg_parser.add_argument("command",
                            nargs="+",
                            help="the command to run under maybe's control")
    arg_group = arg_parser.add_mutually_exclusive_group()
    arg_group.add_argument(
        "-a",
        "--allow",
        nargs="+",
        metavar="OPERATION",
        help="allow the command to perform the specified operation(s). " +
        "all other operations will be denied. " +
        "possible values for %(metavar)s are: " +
        ", ".join(sorted(SYSCALL_FILTERS.keys())) +
        "; as well as any filter scopes defined by loaded plugins")
    arg_group.add_argument(
        "-d",
        "--deny",
        nargs="+",
        metavar="OPERATION",
        help="deny the command the specified operation(s). " +
        "all other operations will be allowed. " +
        "see --allow for a list of possible values for %(metavar)s. " +
        "--allow and --deny cannot be combined")
    arg_parser.add_argument(
        "-p",
        "--plugin",
        nargs="+",
        metavar="FILE",
        help="load the specified plugin script(s). " +
        "see the README for details and plugin API documentation")
    arg_parser.add_argument(
        "-l",
        "--list-only",
        action="store_true",
        help="list operations without header, indentation and rerun prompt")
    arg_parser.add_argument(
        "--style-output",
        choices=["yes", "no", "auto"],
        default="auto",
        help="colorize output using ANSI escape sequences (yes/no) " +
        "or automatically decide based on whether stdout is a terminal (auto, default)"
    )
    arg_parser.add_argument(
        "-v",
        "--verbose",
        action="count",
        help="if specified once, print every filtered syscall. " +
        "if specified twice, print every syscall, highlighting filtered syscalls"
    )
    arg_parser.add_argument("--version",
                            action="version",
                            version="%(prog)s 0.4.0")
    args = arg_parser.parse_args(argv)

    initialize_terminal(args.style_output)

    if args.plugin is not None:
        for plugin_path in args.plugin:
            try:
                module_name = splitext(basename(plugin_path))[0]
                # Note: imp.load_source is *long* deprecated and not even documented
                # in Python 3 anymore, but it still seems to work and the "alternatives"
                # (see http://stackoverflow.com/a/67692) are simply too insane to use
                load_source(module_name, plugin_path)
            except Exception as error:
                print(
                    T.red("Error loading %s: %s." %
                          (T.bold(plugin_path) + T.red, error)))
                return 1

    if args.allow is not None:
        for filter_scope in args.allow:
            if filter_scope not in SYSCALL_FILTERS:
                print(
                    T.red("Unknown operation in --allow: %s." %
                          (T.bold(filter_scope) + T.red)))
                return 1
        filter_scopes = set(SYSCALL_FILTERS.keys()) - set(args.allow)
    elif args.deny is not None:
        for filter_scope in args.deny:
            if filter_scope not in SYSCALL_FILTERS:
                print(
                    T.red("Unknown operation in --deny: %s." %
                          (T.bold(filter_scope) + T.red)))
                return 1
        filter_scopes = args.deny
    else:
        filter_scopes = SYSCALL_FILTERS.keys()

    syscall_filters = {}

    for filter_scope in SYSCALL_FILTERS:
        if filter_scope in filter_scopes:
            for syscall in SYSCALL_FILTERS[filter_scope]:
                syscall_filters[syscall] = SYSCALL_FILTERS[filter_scope][
                    syscall]

    # Suppress logging output from python-ptrace
    getLogger().addHandler(NullHandler())

    # Prevent python-ptrace from decoding arguments to keep raw numerical values
    DIRFD_ARGUMENTS.clear()
    SYSCALL_ARG_DICT.clear()
    ARGUMENT_CALLBACK.clear()

    # This is basically "shlex.join"
    command = " ".join([(("'%s'" % arg) if (" " in arg) else arg)
                        for arg in args.command])

    try:
        args.command[0] = locateProgram(args.command[0])
        pid = createChild(args.command, False)
    except Exception as error:
        print(
            T.red("Error executing %s: %s." %
                  (T.bold(command) + T.red, error)))
        return 1

    debugger = PtraceDebugger()
    debugger.traceFork()
    debugger.traceExec()

    process = debugger.addProcess(pid, True)
    process.syscall()

    try:
        operations = get_operations(debugger, syscall_filters, args.verbose)
    except Exception as error:
        print(T.red("Error tracing process: %s." % error))
        return 1
    except KeyboardInterrupt:
        print(
            T.yellow("%s terminated by keyboard interrupt." %
                     (T.bold(command) + T.yellow)))
        return 2
    finally:
        # Cut down all processes no matter what happens
        # to prevent them from doing any damage
        debugger.quit()

    if operations:
        if not args.list_only:
            print(
                "%s has prevented %s from performing %d file system operations:\n"
                % (T.bold("maybe"), T.bold(command), len(operations)))
        for operation in operations:
            print(("" if args.list_only else "  ") + operation)
        if not args.list_only:
            print(
                "\nDo you want to rerun %s and permit these operations? [y/N] "
                % T.bold(command),
                end="")
            try:
                choice = input()
            except KeyboardInterrupt:
                choice = ""
                # Ctrl+C does not print a newline automatically
                print("")
            if choice.lower() == "y":
                subprocess.call(args.command)
    else:
        print("%s has not detected any file system operations from %s." %
              (T.bold("maybe"), T.bold(command)))
Code Example #27
File: __init__.py Project: fcom86/flake8
"""
import logging
try:
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        """Shim for version of Python < 2.7."""

        def emit(self, record):
            """Do nothing."""
            pass
import sys

LOG = logging.getLogger(__name__)
LOG.addHandler(NullHandler())

# Clean up after LOG config
del NullHandler

__version__ = '3.2.1.dev0'
__version_info__ = tuple(int(i) for i in __version__.split('.') if i.isdigit())


# There is nothing lower than logging.DEBUG (10) in the logging library,
# but we want an extra level to avoid being too verbose when using -vv.
_EXTRA_VERBOSE = 5
logging.addLevelName(_EXTRA_VERBOSE, 'VERBOSE')

_VERBOSITY_TO_LOG_LEVEL = {
    # output more than warnings but not debugging info
Code Example #28
File: __init__.py Project: tws0002/kk-dailies
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:

    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


logging.getLogger().addHandler(NullHandler())

from dailies import Dailies
from dailies import set_logger
Code Example #29
import rasterio._err
import rasterio.coords
import rasterio.enums
import rasterio.path

__all__ = ['band', 'open', 'pad', 'Env']
__version__ = "1.0.26"
__gdal_version__ = gdal_version()

# Rasterio attaches NullHandler to the 'rasterio' logger and its
# descendants. See
# https://docs.python.org/2/howto/logging.html#configuring-logging-for-a-library
# Applications must attach their own handlers in order to see messages.
# See rasterio/rio/main.py for an example.
log = logging.getLogger(__name__)
log.addHandler(NullHandler())


@ensure_env_with_credentials
def open(fp,
         mode='r',
         driver=None,
         width=None,
         height=None,
         count=None,
         crs=None,
         transform=None,
         dtype=None,
         nodata=None,
         sharing=True,
         **kwargs):
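As the comment in the rasterio example notes, the library only attaches a NullHandler, so an application must add its own handler to see rasterio's messages. A minimal, illustrative sketch; the levels chosen here are assumptions:

import logging

# Application-side configuration: give the process a real handler via basicConfig,
# then turn up verbosity for the 'rasterio' logger specifically.
logging.basicConfig(level=logging.INFO)
logging.getLogger('rasterio').setLevel(logging.DEBUG)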
Code Example #30
    def __init__(self):
        """
Constructor __init__(LogHandler)

:since: v1.0.0
        """

        # global: _api_type, _API_PYTHON
        # pylint: disable=protected-access

        AbstractLogHandler.__init__(self)

        self.logger = None
        """
Logger object
        """
        self.log_file_path_name = None
        """
Path and filename of the log file
        """
        self.log_format_datetime = Settings.get("global_log_datetime",
                                                "%m/%d/%Y %H:%M:%S")
        """
Date/Time format
        """
        self.log_file_size_max = int(
            Settings.get("global_log_size_max", 104857600))
        """
File size a log file gets rotated
        """
        self.log_file_rotates = int(Settings.get("global_log_rotates", 5))
        """
Preserve the amount of files
        """

        self.level_map = {
            "debug": DEBUG,
            "error": ERROR,
            "info": INFO,
            "warning": WARNING
        }

        level = Settings.get("global_log_level")
        self.level['global'] = self.level_map.get(level, WARNING)

        self.logger = logging.getLogger(self._ident)
        self.logger.setLevel(DEBUG)

        if (LogHandler._log_handler is None):
            self._init_handler()

            if (_api_type == _API_PYTHON):
                logger_root = logging.getLogger()

                if ((hasattr(logger_root, "hasHandlers") and
                     (not logger_root.hasHandlers()))
                        or len(logger_root.handlers) < 1):
                    if (Settings.get("global_log_initialize_root", True)):
                        logger_root.addHandler(self.log_handler)
                    else:
                        logger_root.addHandler(NullHandler())
                #

                self.logger.addHandler(self.log_handler)
                self.logger.propagate = False
            else:
                self.logger.addHandler(self.log_handler)
                self.logger.setUseParentHandlers(False)
            #

            LogHandler._log_handler = self.log_handler
        else:
            self.log_handler = LogHandler._log_handler

        self.log_thread_id = Settings.get("global_log_thread_id", False)