Ejemplo n.º 1
0
 def __init__(self, *args, **kwds):
   """Initialize the base NullHandler and the log4py bookkeeping fields."""
   NullHandler.__init__(self, *args, **kwds)
   # Property table and producer identification defaults.
   self.log4pyProps = {}
   self.prodId = "RESOURCE.ID"
   self.prodName = "RESOURCE.NAME"
   self.prodFQN = "RESOURCE.FQN"
   # Public and private channel/naming state, unresolved until configured.
   self.channelName = None
   self.nameContext = None
   self._channelName = None
   self._nameContext = None
   # Event-channel plumbing; publishing stays disabled until enabled.
   self._event_channel = None
   self._ecm = None
   self._pub = None
   self._enable = False
   self.threshold = logging.NOTSET
Ejemplo n.º 2
0
def create_logger(robot, path=None, debug=False):
    """Create and configure a logger for *robot*.

    :param robot: Logger name; also used as the log file basename.
    :param path: Directory for the log file.  When ``None`` no file is
        written and a ``NullHandler`` is attached instead.
    :param debug: Log at DEBUG level instead of INFO.
    :returns: The configured ``logging.Logger``.
    """
    logger = logging.getLogger(robot)
    logger.setLevel(logging.DEBUG if debug else logging.INFO)

    # Fix: getLogger() returns a cached instance, so calling this function
    # twice for the same robot used to stack a second handler and emit
    # every record multiple times.  Only attach a handler the first time.
    if not logger.handlers:
        if path is None:
            handler = NullHandler()
        else:
            # Rotate at midnight so each day gets its own file.
            logFile = os.path.join(path, robot + '.log')
            handler = TimedRotatingFileHandler(logFile, when='midnight')

        formatter = Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)

        logger.addHandler(handler)

    return logger
Ejemplo n.º 3
0
 def __init__(self, endpoint, token, logger_name="dexcell_rest_api"):
     """Store the API endpoint/token and attach a quiet logger.

     The NullHandler is attached only when the named logger has no
     handlers yet, so repeated instantiation does not stack handlers.
     """
     self.endpoint = endpoint
     self.token = token
     self.logger = logging.getLogger(logger_name)
     if not self.logger.handlers:
         self.logger.setLevel(logging.INFO)
         self.handler = NullHandler()
         fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
         self.handler.setFormatter(logging.Formatter(fmt))
         self.logger.addHandler(self.handler)
Ejemplo n.º 4
0
 def __init__(self, endpoint, hash_dexma, secret, logger_name="dexcell_rest_api_auth"):
     """Record the auth credentials and lazily configure the logger."""
     self.endpoint = endpoint
     self.hash = hash_dexma
     self.secret = secret
     self.logger = logging.getLogger(logger_name)
     # Only configure the (process-wide) named logger on first use.
     if not self.logger.handlers:
         self.logger.setLevel(logging.INFO)
         self.handler = NullHandler()
         line_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
         self.handler.setFormatter(logging.Formatter(line_format))
         self.logger.addHandler(self.handler)
Ejemplo n.º 5
0
 def setup(
     self,
     gateway=DEFAULT_GATEWAY,
     loggerName=DEFAULT_LOGGERNAME,
     logfile=DEFAULT_LOGFILE,
     loglevel=DEFAULT_LOGLEVEL,
     server=DEFAULT_SERVER,
     url=DEFAULT_URL,
 ):
     """Re-point the sender at *server*/*url* and refresh its logger."""
     self.__server = server
     self.__url = url
     self.__gateway = gateway
     self.__logger = logging.getLogger(loggerName)
     if self.__logger.handlers:
         # Logger already configured by an earlier instance; leave it alone.
         return
     self.__logger.setLevel(loglevel)
     self.handler = NullHandler()
     fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
     self.handler.setFormatter(logging.Formatter(fmt))
     self.__logger.addHandler(self.handler)
Ejemplo n.º 6
0
 def __init__(
     self,
     gateway=DEFAULT_GATEWAY,
     loggerName=DEFAULT_LOGGERNAME,
     logfile=DEFAULT_LOGFILE,
     loglevel=DEFAULT_LOGLEVEL,
     server=DEFAULT_SERVER,
     url=DEFAULT_URL,
     https=True,
     timeout=30.0,
 ):
     """Build a sender bound to *server*/*url*, logging via *loggerName*."""
     self.__https = https
     self.__server = server
     self.__url = url
     self.__timeout = timeout
     self.__gateway = gateway
     self.__logger = logging.getLogger(loggerName)
     # Configure the shared named logger only the first time it is seen.
     if not self.__logger.handlers:
         self.__logger.setLevel(loglevel)
         self.handler = NullHandler()
         fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
         self.handler.setFormatter(logging.Formatter(fmt))
         self.__logger.addHandler(self.handler)
Ejemplo n.º 7
0
import logging

try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Fallback for Python < 2.7, where logging has no NullHandler: a
    # handler that silently discards every record.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


# Attach a NullHandler so the library never triggers "No handlers could
# be found" warnings; applications decide where records actually go.
root = logging.getLogger(__name__)
root.addHandler(NullHandler())
Ejemplo n.º 8
0
import logging

from pypuppetdb.api import v2
from pypuppetdb.api import v3
from pypuppetdb.errors import UnsupportedVersionError

try:  # Python 2.7+
    from logging import NullHandler
except ImportError:  # pragma: notest

    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


logging.getLogger(__name__).addHandler(NullHandler())


def connect(api_version=3,
            host='localhost',
            port=8080,
            ssl_verify=False,
            ssl_key=None,
            ssl_cert=None,
            timeout=10):
    """Connect with PuppetDB. This will return an object allowing you
    to query the API through its methods.

    :param api_version: Version of the API we're initialising.
    :type api_version: :obj:`int`
def cli(config, dry_run, action_file):
    """
    Curator for Elasticsearch indices.

    See http://elastic.co/guide/en/elasticsearch/client/curator/current

    :param config: Path to the curator YAML configuration file.
    :param dry_run: When truthy, each action only reports what it would do.
    :param action_file: Path to the YAML file listing the actions to run.
    """
    # Get config from yaml file
    yaml_config  = get_yaml(config)
    # Get default options and overwrite with any changes
    try:
        yaml_log_opts = prune_nones(yaml_config['logging'])
        log_opts      = settings.logs()
        log_opts.update(yaml_log_opts)
    except KeyError:
        # Use the defaults if there is no logging section
        log_opts = settings.logs()
    # Set up logging
    loginfo = LogInfo(log_opts)
    logging.root.addHandler(loginfo.handler)
    logging.root.setLevel(loginfo.numeric_log_level)
    logger = logging.getLogger('curator.cli')
    # Set up NullHandler() to handle nested elasticsearch.trace Logger
    # instance in elasticsearch python client
    logging.getLogger('elasticsearch.trace').addHandler(NullHandler())
    if log_opts['blacklist']:
        # Filter blacklisted logger names out of every root handler.
        for bl_entry in ensure_list(log_opts['blacklist']):
            for handler in logging.root.handlers:
                handler.addFilter(Blacklist(bl_entry))

    # Get default client options and overwrite with any changes
    try:
        yaml_client  = prune_nones(yaml_config['client'])
        client_args  = settings.client()
        client_args.update(yaml_client)
    except KeyError:
        logger.critical(
            'Unable to read client configuration. '
            'Please check the configuration file: {0}'.format(config)
        )
        sys.exit(1)
    test_client_options(client_args)

    # Extract this and save it for later, in case there's no timeout_override.
    default_timeout = client_args.pop('timeout')
    logger.debug('default_timeout = {0}'.format(default_timeout))
    #########################################
    ### Start working on the actions here ###
    #########################################
    actions = get_yaml(action_file)['actions']
    logger.debug('Full list of actions: {0}'.format(actions))
    # Actions run in sorted key order (keys are typically 1, 2, 3, ...).
    action_keys = sorted(list(actions.keys()))
    for idx in action_keys:
        if 'action' in actions[idx] and actions[idx]['action'] is not None:
            action = actions[idx]['action'].lower()
        else:
            raise MissingArgument('No value for "action" provided')
        logger.info('Action #{0}: {1}'.format(idx, action))
        if not 'options' in actions[idx] or \
                type(actions[idx]['options']) is not type(dict()):
            actions[idx]['options'] = settings.options()
        # Assign and remove these keys from the options as the action will
        # raise an exception if they are passed as kwargs
        action_disabled = actions[idx]['options'].pop('disable_action', False)
        continue_if_exception = (
            actions[idx]['options'].pop('continue_if_exception', False))
        timeout_override = actions[idx]['options'].pop('timeout_override', None)
        ignore_empty_list = actions[idx]['options'].pop(
            'ignore_empty_list', None)
        logger.debug(
            'continue_if_exception = {0}'.format(continue_if_exception))
        logger.debug('timeout_override = {0}'.format(timeout_override))

        ### Skip to next action if 'disabled'
        if action_disabled:
            logger.info(
                'Action "{0}" not performed because "disable_action" is set to '
                'True'.format(action)
            )
            continue

        # Override the timeout, if specified, otherwise use the default.
        if type(timeout_override) == type(int()):
            client_args['timeout'] = timeout_override
        else:
            client_args['timeout'] = default_timeout

        # Set up action kwargs
        # master_timeout is capped at 300 seconds regardless of the override.
        kwargs = {}
        kwargs['master_timeout'] = (
            client_args['timeout'] if client_args['timeout'] <= 300 else 300)
        kwargs['dry_run'] = dry_run
        kwargs['timeout'] = client_args['timeout']

        # Create a client object for each action...
        client = get_client(**client_args)
        logger.debug('client is {0}'.format(type(client)))
        ##########################
        ### Process the action ###
        ##########################
        try:
            logger.debug('TRY: actions: {0} kwargs: '
                '{1}'.format(actions[idx], kwargs)
            )
            process_action(client, actions[idx], **kwargs)
        except Exception as e:
            # NOTE(review): comparing str(type(e)) avoids importing the
            # curator exception classes here; isinstance() against the real
            # classes would be more robust if the import is acceptable.
            if str(type(e)) == "<class 'curator.exceptions.NoIndices'>" or \
                str(type(e)) == "<class 'curator.exceptions.NoSnapshots'>":
                if ignore_empty_list:
                    logger.info(
                        'Skipping action "{0}" due to empty list: '
                        '{1}'.format(action, type(e))
                    )
                else:
                    logger.error(
                        'Unable to complete action "{0}".  No actionable items '
                        'in list: {1}'.format(action, type(e))
                    )
                    sys.exit(1)
            else:
                logger.error(
                    'Failed to complete action: {0}.  {1}: '
                    '{2}'.format(action, type(e), e)
                )
                if continue_if_exception:
                    logger.info(
                        'Continuing execution with next action because '
                        '"continue_if_exception" is set to True for action '
                        '{0}'.format(action)
                    )
                else:
                    sys.exit(1)
        logger.info('Action #{0}: completed'.format(idx))
    logger.info('Job completed.')
Ejemplo n.º 10
0
    'Search',
    'Catalog',

    # API.
    'get_item',
    'get_files',
    'modify_metadata',
    'upload',
    'download',
    'delete',
    'get_tasks',
    'search_items',
    'get_session',
    'configure',
    'get_username',
]


# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Python < 2.7 compatibility shim: a handler that drops all records.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


# Library logger stays silent unless the host application adds handlers.
log = logging.getLogger(__name__)
log.addHandler(NullHandler())
Ejemplo n.º 11
0
        def emit(self, record):
            pass
        def createLock(self):
            self.lock = None

LOG_MSG = ''
_VERSION_ = "1.1.2-0"
# NOTE(review): VERSION and app are defined elsewhere in this module —
# presumably the elasticsearch client version tuple and a Flask-style app.
if VERSION < (1, 0, 0):
    msg = """At least v1.0.0 of the ElasticSearch Python client is required.\n
             Found %r""" % (VERSION)
    app.logger.error(msg)

# Read timeout (seconds) for individual requests; Timeout caps the total.
_read_timeout = 120
timeoutobj = Timeout(total=1200, connect=10, read=_read_timeout)
# Silence logging messages from the elasticsearch client library
logging.getLogger('elasticsearch').addHandler(NullHandler())
# to keep track of timestamp
DOCTYPE = 'sar'
_NAME_ = "index-sar"
TS_ALL = []
_op_type = 'create'


def gen_action(index, rec, nodename, ts):
    md5 = hashlib.md5((nodename + ts).encode('utf-8'))
    TS_ALL.append(ts)
    # ix_all.append(index)
    action = {
        "_op_type": _op_type,
        "_index": index,
        "_type": DOCTYPE,
Ejemplo n.º 12
0
def get_default_logger():
    """Return the shared 'dkimpy' logger with a NullHandler attached.

    The handler is added only when the logger has none yet, so repeated
    calls never stack handlers.
    """
    default_logger = logging.getLogger('dkimpy')
    if not default_logger.handlers:
        default_logger.addHandler(NullHandler())
    return default_logger
Ejemplo n.º 13
0
def test_null_handler(response):
    """Smoke-test attaching a ColorFormatter to a NullHandler."""
    handler = NullHandler()
    handler.setFormatter(color_debug.ColorFormatter())
Ejemplo n.º 14
0
try:
    from sip import setapi
except ImportError:
    pass
else:
    # Select version 2 of the PyQt4 QString/QVariant APIs when sip is
    # available, so Qt returns native Python strings/objects.
    setapi('QString', 2)
    setapi('QVariant', 2)

import sys
# Import hook that sets up the Matplotlib backend on demand —
# presumably to avoid importing matplotlib eagerly; verify in _mpl_backend.
from ._mpl_backend import MatplotlibBackendSetter
sys.meta_path.append(MatplotlibBackendSetter())

import logging
from logging import NullHandler

# Keep the 'glue' library logger quiet unless the app adds handlers.
logging.getLogger('glue').addHandler(NullHandler())


def custom_viewer(name, **kwargs):
    """
    Create a custom interactive data viewer.

    To use this, first create a new variable by calling custom_viewer.
    Then, register one or more viewer functions using decorators.

    :param name: The name of the new viewer
    :type name: str

    Named arguments are used to build widgets and pass data
    to viewer functions. See ``specifying widgets`` below.
Ejemplo n.º 15
0
from oauth1_auth import OAuth1
from oauth1_session import OAuth1Session
#from .oauth2_auth import OAuth2
#from .oauth2_session import OAuth2Session, TokenUpdated

__version__ = '0.4.1'

import requests
# NOTE(review): this is a lexicographic string comparison, so e.g.
# '10.0.0' < '2.0.0' evaluates True; a numeric tuple compare would be safer.
if requests.__version__ < '2.0.0':
    msg = ('You are using requests version %s, which is older than '
           'requests-oauthlib expects, please upgrade to 2.0.0 or later.')
    raise Warning(msg % requests.__version__)

import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Python < 2.7 fallback: a handler that discards every record.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


# Silence the library logger by default; applications attach real handlers.
logging.getLogger('requests_oauthlib').addHandler(NullHandler())
Ejemplo n.º 16
0
from openzwave.option import ZWaveOption
from louie import dispatcher, All
from pyozwweb.app import socketio, app

import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:

    class NullHandler(logging.Handler):
        """NullHandler logger for python 2.6"""
        def emit(self, record):
            pass


# Keep the pyozwweb package logger silent until the app installs handlers.
logging.getLogger('pyozwweb').addHandler(NullHandler())


@socketio.on('my echo event', namespace='/ozwave')
def echo_message(message):
    """Echo *message* back to the client, tracking a per-session count."""
    session['receive_count'] = session.get('receive_count', 0) + 1
    logging.debug("Client %s request echo message : %s", request.remote_addr,
                  message)
    payload = {
        'data': message['data'],
        'count': session['receive_count'],
    }
    emit('my response', payload)


@socketio.on('disconnect request', namespace='/ozwave')
def disconnect_request():
Ejemplo n.º 17
0
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Pre-2.7 shim: logging.NullHandler does not exist yet.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


# NOTE(review): this attaches the NullHandler to the ROOT logger rather
# than a package logger — unusual for a library; confirm it is intended.
logging.getLogger().addHandler(NullHandler())

from dailies import Dailies
from dailies import set_logger
Ejemplo n.º 18
0
def main(argv=sys.argv[1:]):
    """Entry point for the "maybe" sandboxing CLI.

    Parses arguments, loads optional plugin scripts, builds the active
    syscall filter set, traces the target command under ptrace, and
    reports the file-system operations it would have performed.
    Returns a process exit status: 0 on success, 1 on error, 2 when
    interrupted.

    NOTE(review): the default ``sys.argv[1:]`` is evaluated once at
    import time, so later mutations of ``sys.argv`` are not picked up —
    confirm that is intended.
    """
    if PY2:
        argv = [unicode(arg, sys.getfilesystemencoding())
                for arg in argv]  # noqa

    # Insert positional argument separator, if not already present
    if "--" not in argv:
        for i, argument in enumerate(argv):
            if not argument.startswith("-"):
                argv.insert(i, "--")
                break

    arg_parser = ArgumentParser(
        prog="maybe",
        usage="%(prog)s [options] command [argument ...]",
        description=
        "Run a command without the ability to make changes to your system " +
        "and list the changes it would have made.",
        epilog="For more information, to report issues or to contribute, " +
        "visit https://github.com/p-e-w/maybe.",
    )
    arg_parser.add_argument("command",
                            nargs="+",
                            help="the command to run under maybe's control")
    arg_group = arg_parser.add_mutually_exclusive_group()
    arg_group.add_argument(
        "-a",
        "--allow",
        nargs="+",
        metavar="OPERATION",
        help="allow the command to perform the specified operation(s). " +
        "all other operations will be denied. " +
        "possible values for %(metavar)s are: " +
        ", ".join(sorted(SYSCALL_FILTERS.keys())) +
        "; as well as any filter scopes defined by loaded plugins")
    arg_group.add_argument(
        "-d",
        "--deny",
        nargs="+",
        metavar="OPERATION",
        help="deny the command the specified operation(s). " +
        "all other operations will be allowed. " +
        "see --allow for a list of possible values for %(metavar)s. " +
        "--allow and --deny cannot be combined")
    arg_parser.add_argument(
        "-p",
        "--plugin",
        nargs="+",
        metavar="FILE",
        help="load the specified plugin script(s). " +
        "see the README for details and plugin API documentation")
    arg_parser.add_argument(
        "-l",
        "--list-only",
        action="store_true",
        help="list operations without header, indentation and rerun prompt")
    arg_parser.add_argument(
        "--style-output",
        choices=["yes", "no", "auto"],
        default="auto",
        help="colorize output using ANSI escape sequences (yes/no) " +
        "or automatically decide based on whether stdout is a terminal (auto, default)"
    )
    arg_parser.add_argument(
        "-v",
        "--verbose",
        action="count",
        help="if specified once, print every filtered syscall. " +
        "if specified twice, print every syscall, highlighting filtered syscalls"
    )
    arg_parser.add_argument("--version",
                            action="version",
                            version="%(prog)s 0.4.0")
    args = arg_parser.parse_args(argv)

    initialize_terminal(args.style_output)

    # Load plugin scripts; each may register additional filter scopes.
    if args.plugin is not None:
        for plugin_path in args.plugin:
            try:
                module_name = splitext(basename(plugin_path))[0]
                # Note: imp.load_source is *long* deprecated and not even documented
                # in Python 3 anymore, but it still seems to work and the "alternatives"
                # (see http://stackoverflow.com/a/67692) are simply too insane to use
                load_source(module_name, plugin_path)
            except Exception as error:
                print(
                    T.red("Error loading %s: %s." %
                          (T.bold(plugin_path) + T.red, error)))
                return 1

    # Resolve --allow/--deny into the set of filter scopes that stay active.
    if args.allow is not None:
        for filter_scope in args.allow:
            if filter_scope not in SYSCALL_FILTERS:
                print(
                    T.red("Unknown operation in --allow: %s." %
                          (T.bold(filter_scope) + T.red)))
                return 1
        filter_scopes = set(SYSCALL_FILTERS.keys()) - set(args.allow)
    elif args.deny is not None:
        for filter_scope in args.deny:
            if filter_scope not in SYSCALL_FILTERS:
                print(
                    T.red("Unknown operation in --deny: %s." %
                          (T.bold(filter_scope) + T.red)))
                return 1
        filter_scopes = args.deny
    else:
        filter_scopes = SYSCALL_FILTERS.keys()

    # Flatten the per-scope filter tables into one syscall -> filter map.
    syscall_filters = {}

    for filter_scope in SYSCALL_FILTERS:
        if filter_scope in filter_scopes:
            for syscall in SYSCALL_FILTERS[filter_scope]:
                syscall_filters[syscall] = SYSCALL_FILTERS[filter_scope][
                    syscall]

    # Suppress logging output from python-ptrace
    getLogger().addHandler(NullHandler())

    # Prevent python-ptrace from decoding arguments to keep raw numerical values
    DIRFD_ARGUMENTS.clear()
    SYSCALL_ARG_DICT.clear()
    ARGUMENT_CALLBACK.clear()

    # This is basically "shlex.join"
    command = " ".join([(("'%s'" % arg) if (" " in arg) else arg)
                        for arg in args.command])

    try:
        args.command[0] = locateProgram(args.command[0])
        pid = createChild(args.command, False)
    except Exception as error:
        print(
            T.red("Error executing %s: %s." %
                  (T.bold(command) + T.red, error)))
        return 1

    debugger = PtraceDebugger()
    debugger.traceFork()
    debugger.traceExec()

    process = debugger.addProcess(pid, True)
    process.syscall()

    try:
        operations = get_operations(debugger, syscall_filters, args.verbose)
    except Exception as error:
        print(T.red("Error tracing process: %s." % error))
        return 1
    except KeyboardInterrupt:
        print(
            T.yellow("%s terminated by keyboard interrupt." %
                     (T.bold(command) + T.yellow)))
        return 2
    finally:
        # Cut down all processes no matter what happens
        # to prevent them from doing any damage
        debugger.quit()

    if operations:
        if not args.list_only:
            print(
                "%s has prevented %s from performing %d file system operations:\n"
                % (T.bold("maybe"), T.bold(command), len(operations)))
        for operation in operations:
            print(("" if args.list_only else "  ") + operation)
        if not args.list_only:
            # Offer to rerun the command for real, outside the sandbox.
            print(
                "\nDo you want to rerun %s and permit these operations? [y/N] "
                % T.bold(command),
                end="")
            try:
                choice = input()
            except KeyboardInterrupt:
                choice = ""
                # Ctrl+C does not print a newline automatically
                print("")
            if choice.lower() == "y":
                subprocess.call(args.command)
    else:
        print("%s has not detected any file system operations from %s." %
              (T.bold("maybe"), T.bold(command)))
Ejemplo n.º 19
0
    def __init__(self, credentials):
        """Keep *credentials* and set up a silent per-module logger."""
        self.credentials = credentials

        # A NullHandler suppresses "no handlers" warnings; the host
        # application decides where records actually go.
        self.logger = logging.getLogger(__name__)
        self.logger.addHandler(NullHandler())
Ejemplo n.º 20
0
    oauthlib
    ~~~~~~~~

    A generic, spec-compliant, thorough implementation of the OAuth
    request-signing logic.

    :copyright: (c) 2019 by The OAuthlib Community
    :license: BSD, see LICENSE for details.
"""
import logging
from logging import NullHandler

__author__ = 'The OAuthlib Community'
__version__ = '3.2.0'

# Keep the library silent by default; applications attach real handlers.
logging.getLogger('oauthlib').addHandler(NullHandler())

# Module-level debug flag, toggled via set_debug()/get_debug().
_DEBUG = False


def set_debug(debug_val):
    """Set the value of the module-level debug flag.

    :param debug_val: Value to set. Must be a bool value.
    """
    global _DEBUG
    _DEBUG = debug_val


def get_debug():
    """Get debug mode value. 
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal.  If not, see <http://www.gnu.org/licenses/>.
from .api import list_subtitles, download_subtitles
# NOTE(review): "async" is a reserved keyword from Python 3.7; this import
# (with its spacing workaround) only works on older Pythons — confirm the
# supported versions for this package.
from . async import Pool
from .core import (SERVICES, LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE,
                   MATCHING_CONFIDENCE)
from .infos import __version__
import logging
try:
    from logging import NullHandler
except ImportError:
    # Pre-2.7 fallback: a handler that drops every record.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


# Public API of the package.
__all__ = [
    'SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE',
    'MATCHING_CONFIDENCE', 'list_subtitles', 'download_subtitles', 'Pool'
]
logging.getLogger("subliminal").addHandler(NullHandler())
Ejemplo n.º 22
0
def get_logger_and_init_null(logger_name):
    """Fetch the logger named *logger_name* and attach a NullHandler.

    Note that a new NullHandler is appended on every call.
    """
    named_logger = logging.getLogger(logger_name)
    named_logger.addHandler(NullHandler())
    return named_logger
Ejemplo n.º 23
0
 def __init__(self, client, resource_id=None, logger=None):
     """Wrap an AWS *client*, defaulting to a quiet cloudify logger."""
     if not logger:
         logger = init_cloudify_logger(NullHandler(), 'AWSResourceBase')
     self.logger = logger
     self.client = client
     # Normalize the id to a string; falsy ids collapse to None.
     self.resource_id = None if not resource_id else str(resource_id)
Ejemplo n.º 24
0
import logging
import pkg_resources
import sys
from types import ModuleType
# allow high-level functions to be accessed directly from the mappyfile module
# NOTE(review): importing "open" shadows the builtin of the same name for
# "from mappyfile import *" users — confirm this API choice is intentional.
from mappyfile.utils import open, load, loads, find, findall, findunique, dumps, dump, save
from mappyfile.utils import findkey, update, validate

__version__ = "0.8.4"

__all__ = ['open', 'load', 'loads', 'find', 'findall', 'findunique', 'dumps', 'dump', 'save',
           'findkey', 'update', 'validate']


# Expose third-party plugins registered under the "mappyfile.plugins"
# entry-point group as attributes of a synthetic module.
plugins = ModuleType('mappyfile.plugins')
sys.modules['mappyfile.plugins'] = plugins

for ep in pkg_resources.iter_entry_points(group='mappyfile.plugins'):
    setattr(plugins, ep.name, ep.load())

# Set default logging handler to avoid "No handler found" warnings.

try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Python < 2.7: stdlib has no NullHandler, so define a no-op one.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger("mappyfile").addHandler(NullHandler())
Ejemplo n.º 25
0
from six import iteritems

from bitbucket.exceptions import BitbucketError

try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Pre-2.7 fallback handler that discards all records.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


from bitbucket.utils import CaseInsensitiveDict, json_loads

# Silence the library logger unless the application configures handlers.
# NOTE(review): "logging" is not imported in this excerpt — presumably
# imported earlier in the original module; verify.
logging.getLogger('bitbucket').addHandler(NullHandler())

__all__ = ('Repo', 'Project', 'Resource', 'PullRequest', 'User')


def dict2resource(raw, top=None, options=None, session=None):
    if top is None:
        top = PropertyHolder(raw)

    seqs = tuple, list, set, frozenset
    for i, j in iteritems(raw):
        if isinstance(j, dict):
            if 'self' in j:
                resource = cls_for_resource(j['self'])(options, session, j)
                setattr(top, i, resource)
            elif j.has_key('links') and j['links'].has_key('self'):
Ejemplo n.º 26
0
# -*- coding: utf-8 -*-
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Python < 2.7: provide a no-op handler class.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass


# Default handler so "No handler found" warnings are never emitted.
logging.getLogger('pymailutils').addHandler(NullHandler())
Ejemplo n.º 27
0
)
from ..rules import (
    AudioChannelsRule,
    ClosedCaptionRule,
    HearingImpairedRule,
    LanguageRule,
    ResolutionRule,
)
from ..units import units
from ..utils import (
    define_candidate,
    detect_os,
)

logger = getLogger(__name__)
logger.addHandler(NullHandler())


WARN_MSG = '''
=========================================================================================
MediaInfo not found on your system or could not be loaded.
Visit https://mediaarea.net/ to download it.
If you still have problems, please check if the downloaded version matches your system.
To load MediaInfo from a specific location, please define the location as follow:
  knowit --mediainfo /usr/local/mediainfo/lib <video_path>
  knowit --mediainfo /usr/local/mediainfo/bin <video_path>
  knowit --mediainfo "C:\Program Files\MediaInfo" <video_path>
  knowit --mediainfo C:\Software\MediaInfo.dll <video_path>
  knowit --mediainfo C:\Software\MediaInfo.exe <video_path>
  knowit --mediainfo /opt/mediainfo/libmediainfo.so <video_path>
  knowit --mediainfo /opt/mediainfo/libmediainfo.dylib <video_path>
Ejemplo n.º 28
0
from glob import glob
from logging import getLogger, NullHandler
from os.path import abspath, dirname, join
from sys import version_info

getLogger('bokeh').addHandler(NullHandler())
getLogger('tornado').addHandler(NullHandler())


class FatalException(Exception):

    '''
    Base class for exceptions that cannot be ignored at a higher level
    (fundamental problems such as bad configuration).
    '''

    pass


from .commands.activities import activities
from .commands.args import COMMAND, parser, NamespaceWithVariables, PROGNAME, HELP, DEV, DIARY, FIT, \
    PACKAGE_FIT_PROFILE, ACTIVITIES, NO_OP, CONFIG, CONSTANTS, STATISTICS, TEST_SCHEDULE, MONITOR, GARMIN, \
    UNLOCK, DUMP, FIX_FIT, CH2_VERSION, JUPYTER, TUI
from .commands.constants import constants
from .commands.dump import dump
from .commands.config import config
from .commands.diary import diary
from .commands.fit import fit
from .commands.fix_fit import fix_fit
from .commands.garmin import garmin
from .commands.jupyter import jupyter
Ejemplo n.º 29
0
    'Priority',
    'Version',
    'Role',
    'Resolution',
    'SecurityLevel',
    'Status',
    'User',
    'Group',
    'CustomFieldOption',
    'RemoteLink',
    'Customer',
    'ServiceDesk',
    'RequestType',
)

logging.getLogger('jira').addHandler(NullHandler())


def get_error_list(r):
    error_list = []
    if r.status_code >= 400:
        if r.status_code == 403 and "x-authentication-denied-reason" in r.headers:
            error_list = [r.headers["x-authentication-denied-reason"]]
        elif r.text:
            try:
                response = json_loads(r)
                if 'message' in response:
                    # JIRA 5.1 errors
                    error_list = [response['message']]
                elif 'errorMessages' in response and len(
                        response['errorMessages']) > 0:
Ejemplo n.º 30
0
class DexcellRestApiAuth(object):
    """Helper for authenticating against the Dexcell (DEXMA) REST API.

    Wraps the OAuth temp-token exchange and a simple remote key-value
    store.  Uses ``urllib2``/``basestring`` and is therefore Python 2
    only (those names are imported elsewhere in this module).
    """

    def __init__(self, endpoint, hash_dexma, secret, logger_name="dexcell_rest_api_auth"):
        """Store credentials and configure the named logger only once.

        :param endpoint: Base URL of the REST API.
        :param hash_dexma: Client identifier hash.
        :param secret: Client secret used in requests and headers.
        :param logger_name: Name of the logger to use.
        """
        self.endpoint = endpoint
        self.hash = hash_dexma
        self.secret = secret
        self.logger = logging.getLogger(logger_name)
        if len(self.logger.handlers) == 0:
            self.logger.setLevel(logging.INFO)
            self.handler = NullHandler()
            h_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
            self.handler.setFormatter(logging.Formatter(h_format))
            self.logger.addHandler(self.handler)

    def _json_date_handler(self, obj):
        """``json.dumps`` default= hook: serialize dates via isoformat()."""
        return obj.isoformat() if hasattr(obj, "isoformat") else obj

    def _datetime_parser(self, dct):
        """``json.loads`` object_hook: turn ISO-8601 strings into datetimes.

        Values that match the pattern but fail strict parsing are left
        unchanged.
        """
        DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
        strp = datetime.strptime
        for k, v in dct.items():
            # Raw string: the original non-raw "\d" escapes are invalid
            # escape sequences and a SyntaxWarning/error on modern Python.
            if isinstance(v, basestring) and re.search(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v):
                try:
                    dct[k] = strp(v, DATE_FORMAT)
                except ValueError:
                    pass
        return dct

    def _call_rest(self, url):
        """GET ``endpoint + url`` and return the raw response body."""
        url = self.endpoint + url
        req = urllib2.Request(url)
        response = urllib2.urlopen(req)
        data = response.read()
        self.logger.info(data)
        return data

    def perm_token(self, temp_token):
        """Exchange *temp_token* for a permanent OAuth access token."""
        url = "/oauth/accesstoken?temp_token=%s&secret=%s&idclient=%s" % (str(temp_token), self.secret, self.hash)
        response = self._call_rest(url)
        return response

    def set_key_value(self, key, value):
        """Set this key with this value in the key-value data store."""
        url = self.endpoint + "/things/set/" + key
        req = urllib2.Request(
            url, json.dumps(value, default=self._json_date_handler), headers={"x-dexcell-secret": self.secret}
        )
        # NOTE(review): this log line includes the client secret — consider
        # removing it from the message to avoid leaking credentials to logs.
        self.logger.info("storing key: %s with secret: %s" % (key, self.secret))
        response = urllib2.urlopen(req)
        data = response.read()
        return data

    def get_key(self, key):
        """Get this key from the key-value data store.

        The stored value arrives JSON-encoded inside the response's
        ``result`` field, so the body is decoded twice.
        """
        url = "%s/things/get/%s" % (self.endpoint, key)
        req = urllib2.Request(url, headers={"x-dexcell-secret": self.secret})
        response = urllib2.urlopen(req)
        data = response.read()
        data = json.loads(data, object_hook=self._datetime_parser)
        result = json.loads(data["result"], object_hook=self._datetime_parser)
        return result
Ejemplo n.º 31
0
class DexcellSender(object):
    """Sends DexcellServiceMessage readings to a Dexcell insert endpoint.

    Readings are POSTed as URL-encoded JSON to ``server + url`` over
    HTTPS (default) or HTTP.
    """

    DEFAULT_SERVER = "insert.dexcell.com"
    DEFAULT_URL = "/insert-json.htm"
    DEFAULT_LOGFILE = "/var/log/dexma/DexcellSender.log"
    DEFAULT_LOGLEVEL = logging.INFO
    DEFAULT_GATEWAY = "None"
    DEFAULT_LOGGERNAME = "DexcellSender"

    def __init__(
        self,
        gateway=DEFAULT_GATEWAY,
        loggerName=DEFAULT_LOGGERNAME,
        logfile=DEFAULT_LOGFILE,
        loglevel=DEFAULT_LOGLEVEL,
        server=DEFAULT_SERVER,
        url=DEFAULT_URL,
        https=True,
        timeout=30.0,
    ):
        """Create a sender.

        :param gateway: gateway mac/id reported as ``gatewayId``
        :param loggerName: name of the logger used by this sender
        :param logfile: accepted for backward compatibility (unused here)
        :param loglevel: level applied when the logger is first configured
        :param server: insert server host
        :param url: insert path on the server
        :param https: use HTTPS when True, plain HTTP otherwise
        :param timeout: socket timeout in seconds for the insert connection
        """
        self.__https = https
        self.__server = server
        self.__url = url
        self.__timeout = timeout
        self.__gateway = gateway
        self.__logger = logging.getLogger(loggerName)
        self.__configureLogger(loglevel)

    def __configureLogger(self, loglevel):
        """Attach a NullHandler once so the logger never emits the
        "no handlers could be found" warning; shared by __init__ and setup."""
        if len(self.__logger.handlers) == 0:
            self.__logger.setLevel(loglevel)
            self.handler = NullHandler()
            h_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
            self.handler.setFormatter(logging.Formatter(h_format))
            self.__logger.addHandler(self.handler)

    def setup(
        self,
        gateway=DEFAULT_GATEWAY,
        loggerName=DEFAULT_LOGGERNAME,
        logfile=DEFAULT_LOGFILE,
        loglevel=DEFAULT_LOGLEVEL,
        server=DEFAULT_SERVER,
        url=DEFAULT_URL,
    ):
        """Setup the Dexcell Sender Object

        Reconfigures server, url, gateway and logger; ``https`` and
        ``timeout`` keep the values given at construction time.
        """
        self.__server = server
        self.__url = url
        self.__gateway = gateway
        self.__logger = logging.getLogger(loggerName)
        self.__configureLogger(loglevel)

    def changeGateway(self, gateway):
        """Change the gateway mac that will be sent
        """
        self.__gateway = gateway

    def __insertRawJSONData(self, data):
        """Insert the raw data string to the server.

        Retries ``getresponse`` up to 10 times (1s apart) before giving up.

        :returns: ``(status, data-header)`` on success, ``(-1, "FAIL")``
            after exhausting the retries
        """
        params = "data=" + data
        headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
        if self.__https:
            conn = httplib.HTTPSConnection(self.__server, timeout=self.__timeout)
        else:
            conn = httplib.HTTPConnection(self.__server, timeout=self.__timeout)
        conn.request("POST", self.__url, params, headers)
        error = True
        maxerror = 0
        while error:
            try:
                response = conn.getresponse()
                error = False
            except Exception:
                # logger.exception already records the traceback; the original
                # additionally `print`ed its None return value (py2-only syntax).
                self.__logger.exception("Error inserting data")
                maxerror = maxerror + 1
                time.sleep(1)
                if maxerror > 10:
                    return (-1, "FAIL")
        logger_msg_wo_params = "Insert from %s with status %s and result %s"
        logger_params = (self.__gateway, str(response.status), str(response.getheader("data")))
        logger_message = logger_msg_wo_params % logger_params
        self.__logger.debug(logger_message)
        return response.status, response.getheader("data")

    def __buildReading(self, serviceMessage, timezone):
        """Map one DexcellServiceMessage onto the JSON reading structure."""
        return {
            "nodeNetworkId": str(serviceMessage.node),
            "serviceNetworkId": int(serviceMessage.service),
            "value": float(serviceMessage.value),
            "seqNum": int(serviceMessage.seqnum),
            "timeStamp": time.strftime("%Y-%m-%dT%H:%M:%S.000 " + timezone, serviceMessage.timestamp),
        }

    def insertDexcellServiceMessage(self, serviceMessage, timezone="UTC", extraparams=None):
        """Insert a single DexcellServiceMessage
        """
        # Single insert is just the batch insert with a one-element list.
        return self.insertDexcellServiceMessages([serviceMessage], timezone, extraparams)

    def insertDexcellServiceMessages(self, serviceMessageIterator, timezone="UTC", extraparams=None):
        """ Insert many DexcellServiceMessages at once

        :param serviceMessageIterator: iterable of DexcellServiceMessage
        :param timezone: timezone label appended to each timestamp string
        :param extraparams: optional extra top-level JSON fields
            (default None instead of a shared mutable ``{}``)
        """
        readings = [self.__buildReading(msg, timezone) for msg in serviceMessageIterator]
        data = {"gatewayId": self.__gateway, "service": readings}
        for key, value in (extraparams or {}).items():
            data[key] = value
        result = self.__insertRawJSONData(json.dumps(data))
        return result
Ejemplo n.º 32
0
class DexcellRestApi(object):

    """
        Client for the Dexcell REST API: utility calls grouped by
        deployment, location and device, authenticated with a token
        sent in the ``x-dexcell-token`` header.
    """

    def __init__(self, endpoint, token, logger_name="dexcell_rest_api"):
        """Store endpoint/token and attach a quiet default logger.

        :param endpoint: base URL of the REST API
        :param token: value for the ``x-dexcell-token`` header
        :param logger_name: name of the logger used by this client
        """
        self.endpoint = endpoint
        self.token = token
        self.logger = logging.getLogger(logger_name)
        if len(self.logger.handlers) == 0:
            # Configure only once; a NullHandler silences the
            # "no handlers could be found" warning without forcing output.
            self.logger.setLevel(logging.INFO)
            self.handler = NullHandler()
            h_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
            self.handler.setFormatter(logging.Formatter(h_format))
            self.logger.addHandler(self.handler)

    def _json_date_handler(self, obj):
        """``json.dumps`` default hook: ISO-format date-like objects."""
        return obj.isoformat() if hasattr(obj, "isoformat") else obj

    def _datetime_parser(self, dct):
        """``json.loads`` object_hook: convert ISO-like strings to datetimes."""
        DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
        try:
            text_types = basestring  # Python 2: covers str and unicode
        except NameError:
            text_types = str  # Python 3 fallback
        for k, v in dct.items():
            # Raw string: "\d" is an invalid escape sequence in modern Python.
            if isinstance(v, text_types) and re.search(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v):
                try:
                    dct[k] = datetime.strptime(v, DATE_FORMAT)
                except ValueError:
                    # Looked like a date but did not parse; keep the string.
                    pass
        return dct

    def dxdate(self, dt):
        """ convert datetime into default date string format used in dexcell api calls """
        return dt.strftime("%Y%m%d%H%M%S")

    def _call_rest(self, url, payload=None, parse_response=True):
        """Perform the HTTP call and translate HTTP errors to DexcellRestApiError.

        :param url: path appended to ``self.endpoint``
        :param payload: optional request body (makes the request a POST)
        :param parse_response: when True, return ``json.loads`` of the body
        :raises DexcellRestApiError: NOTFOUND (404), INVALIDTOKEN (401)
            or UNKNOWN (anything else)
        """
        url = self.endpoint + url
        self.logger.info("url:%s token:%s" % (url, self.token))
        if payload is None:
            req = urllib2.Request(url, headers={"x-dexcell-token": self.token})
        else:
            req = urllib2.Request(url, payload, headers={"x-dexcell-token": self.token})
        try:
            response = urllib2.urlopen(req, timeout=600.0)
            data = response.read()
            if parse_response:
                return json.loads(data)
            else:
                return data
        except urllib2.HTTPError as httperror:

            info = json.loads(httperror.read())

            if httperror.code == 404:
                self.logger.error("error: not found")
                raise DexcellRestApiError("NOTFOUND", info["description"], info["moreInfo"])
            elif httperror.code == 401:
                self.logger.error("error: not authorized")
                raise DexcellRestApiError("INVALIDTOKEN", info["description"], info["moreInfo"])
            else:
                # Log before raising: the original logged after the raise,
                # which made the logging call unreachable.
                self.logger.error("error: %s" % (str(httperror.code)))
                raise DexcellRestApiError("UNKNOWN", info["description"], info["moreInfo"])

    def get_deployment(self, dep_id):
        """ return dict with basic information from deployment number dep_id"""
        url = "/deployments/%i.json" % dep_id
        deployment = self._call_rest(url)
        return deployment

    def get_deployment_locations(self, dep_id):
        """ return array with locations information from deployment number dep_id"""
        url = "/deployments/%i/locations.json" % dep_id
        location_list = self._call_rest(url)
        return location_list

    def get_deployment_devices(self, dep_id):
        """ return array with devices information from deployment number dep_id"""
        url = "/deployments/%i/devices.json" % dep_id
        device_list = self._call_rest(url)
        return device_list

    def get_deployment_parameters(self, dep_id):
        """ return array with parameters {freq, name, id, i18m, units}
            from deployment number dep_id
        """
        url = "/deployments/%i/parameters.json" % dep_id
        param_list = self._call_rest(url)
        return param_list

    def get_deployment_supplies(self, dep_id):
        """ return array with supplies {pod, name, id}
            from deployment number dep_id
        """
        url = "/deployments/%i/supplies.json" % dep_id
        supply_list = self._call_rest(url)
        return supply_list

    def get_deployment_notices(self, dep_id, start, end):
        """ return array with alerts information from deployment number dep_id
            from the interval selected
        """
        start = self.dxdate(start)
        end = self.dxdate(end)
        url = "/deployments/%i/notices.json?start=%s&end=%s" % (dep_id, start, end)
        notice_list = self._call_rest(url)
        return notice_list

    def get_deployment_parameter_devices(self, dep_id, param_nid):
        """ return array with parameters {id, name, networkid}
            from deployment number dep_id
        """
        url = "/deployments/%i/parameters/%s/devices.json" % (dep_id, str(param_nid))
        device_list = self._call_rest(url)
        return device_list

    def set_deployment_thing(self, dep_id, key, value):
        """ update dict of information saved by the user"""
        url = "/deployments/%i/things/set/%s.json" % (dep_id, key)
        payload = json.dumps(value, default=self._json_date_handler)
        data = self._call_rest(url, payload=payload, parse_response=False)
        return data

    def get_deployment_thing(self, dep_id, key):
        """ return dict of information saved by the user"""
        url = "/deployments/%i/things/get/%s.json" % (dep_id, key)
        data = self._call_rest(url, parse_response=False)
        self.logger.info("dep_thing:%s" % str(data))
        data = json.loads(data, object_hook=self._datetime_parser)
        return data

    def get_location(self, loc_id):
        """ return dict with basic information from location number loc_id"""
        url = "/locations/%i.json" % loc_id
        location = self._call_rest(url)
        return location

    def get_location_parameters(self, loc_id):
        """ return array with parameters {freq, name, id, i18m, units}
            from location number loc_id
        """
        url = "/locations/%i/parameters.json" % loc_id
        param_list = self._call_rest(url)
        return param_list

    def get_location_notices(self, loc_id, start, end):
        """ return array with alerts information for location number loc_id
            from the interval selected
        """
        start = self.dxdate(start)
        end = self.dxdate(end)
        url = "/locations/%i/notices.json?start=%s&end=%s" % (loc_id, start, end)
        notice_list = self._call_rest(url)
        return notice_list

    def get_location_comments(self, loc_id, start, end):
        """ return array with comments information for location number loc_id
            from the interval selected
        """
        start = self.dxdate(start)
        end = self.dxdate(end)
        url = "/locations/%i/comments.json?start=%s&end=%s" % (loc_id, start, end)
        comments = self._call_rest(url)
        return comments

    def get_location_parameter_devices(self, loc_id, param_nid):
        """ return array with parameters {id, name, networkid}
            from location number loc_id
        """
        url = "/locations/%i/parameters/%i/devices.json" % (loc_id, param_nid)
        device_list = self._call_rest(url)
        return device_list

    def get_location_supplies(self, loc_id):
        """ return array with supplies {pod, name, id}
            from location number loc_id
        """
        url = "/locations/%i/supplies.json" % loc_id
        supply_list = self._call_rest(url)
        return supply_list

    def get_location_devices(self, loc_id):
        """ return array with the devices from the location """
        url = "/locations/%i/devices.json" % loc_id
        device_list = self._call_rest(url)
        return device_list

    def get_device(self, dev_id):
        """ return dict with information for the device """
        url = "/devices/%i.json" % dev_id
        device = self._call_rest(url)
        return device

    def get_device_parameters(self, dev_id):
        """ return array with parameters {freq, name, id, i18m, units}
            from device number dev_id
        """
        param_list = self._call_rest("/devices/" + str(dev_id) + "/parameters.json")
        return param_list

    def get_simulated_bill(
        self, dev_id, start, end, type_param="ELECTRICAL", parameters="AAANNN", pod=None, time="HOUR"
    ):
        """ returns bill generated from data in dexcell
            Parameters
                A : RAW + SUMMARY
                R: RAW set of datas returned by time mesure, default hour
                S: SUMMARY resum of data: totals, periods...
                N: nothing
            type_param can be ELECTRICAL, WATER, GAS

            NOTE: the ``time`` keyword shadows the ``time`` module inside
            this method; kept for backward compatibility with callers.
        """
        new_pod = ""
        if pod is not None:
            new_pod = "&pod=" + pod
        start = start.strftime("%Y%m%d%H%M%S")
        end = end.strftime("%Y%m%d%H%M%S")
        url = ["/cost/%i/%s.json?start=%s" % (dev_id, type_param, start)]
        url.append("&end=%s&applyPattern=%s&period=%s%s" % (end, parameters, time, new_pod))
        url = "".join(url)
        bill = self._call_rest(url)
        return bill

    def get_supply_bills(self, sup_id, start, end, type_param="ELECTRICAL", parameters="AAANNN", pod=None, time="HOUR"):
        """ returns bills updated by the customer
            Parameters
                A : RAW + SUMMARY
                R: RAW set of datas returned by time mesure, default hour
                S: SUMMARY resum of data: totals, periods...
                N: nothing
            type_param can be ELECTRICAL, WATER, GAS
        """
        new_pod = ""
        if pod is not None:
            new_pod = "&pod=" + pod
        start = start.strftime("%Y%m%d%H%M%S")
        end = end.strftime("%Y%m%d%H%M%S")
        url = ["/cost/%i/bills/%s.json?start=%s" % (sup_id, type_param, start)]
        url.append("&end=%s&applyPattern=%s&period=%s%s" % (end, parameters, time, new_pod))
        url = "".join(url)
        bills = self._call_rest(url)
        return bills

    def get_session(self, session_id):
        """ return the session for an app with a concrete session_id"""
        url = "/session/%s.json" % session_id
        response = self._call_rest(url)
        self.logger.info("get session: " + str(response))
        return response

    def get_readings(self, dev_id, s_nid, start, end):
        """ return array dict with {values, timestamp} """
        start = self.dxdate(start)
        end = self.dxdate(end)
        url = "/devices/%i/%i/readings.json?start=%s&end=%s" % (dev_id, s_nid, start, end)
        readings = self._call_rest(url)
        for reading in readings:
            try:
                reading["ts"] = datetime.strptime(reading["ts"], "%Y-%m-%d %H:%M:%S")
                reading["tsutc"] = datetime.strptime(reading["tsutc"], "%Y-%m-%d %H:%M:%S")
            except KeyError:
                # Readings without timestamps are returned untouched.
                pass
        return readings

    def get_readings_new(self, dev_id, param, frequency, operation, start, end):
        """ returns array of dict of values from the device dev_id with
            parameter param with a frequency in the interval start - end.
        """
        start = self.dxdate(start)
        end = self.dxdate(end)
        url = ["/devices/%i/%s/readings.json?" % (dev_id, str(param))]
        url.append("start=%s&end=%s&frequency=%s&operation=%s" % (start, end, str(frequency), str(operation)))
        url = "".join(url)
        readings = self._call_rest(url)
        for reading in readings:
            try:
                reading["ts"] = datetime.strptime(reading["ts"], "%Y-%m-%d %H:%M:%S")
                reading["tsutc"] = datetime.strptime(reading["tsutc"], "%Y-%m-%d %H:%M:%S")
            except KeyError:
                pass
        return readings

    def get_cost(self, nid, start, end, energy_type="ELECTRICAL", period="HOUR", grouped=False):
        """ return array from cost and consumption with timestamp"""
        str_grouped = "TRUE"
        if not grouped:
            str_grouped = "FALSE"
        start = self.dxdate(start)
        end = self.dxdate(end)
        url = ["/devices/%i/%s/cost.json?" % (nid, energy_type)]
        url.append("start=%s&end=%s&period=%s&grouped=%s" % (start, end, str(period), str_grouped))
        url = "".join(url)
        raw_response = self._call_rest(url)
        try:
            readings = raw_response["readings"]
            for reading in readings:
                reading["ts"] = datetime.strptime(reading["ts"], "%Y/%m/%d %H:%M:%S")
            periods = raw_response["periods"]
            return readings, periods
        except KeyError:
            # NOTE(review): returns a bare [] here while the success path
            # returns a (readings, periods) tuple — kept as-is for backward
            # compatibility with existing callers.
            return []
Ejemplo n.º 33
0
LOG = logging.getLogger(name=__name__)
if sys.version_info < (2, 7):
    class NullHandler(logging.Handler):
        """Copied from Python 2.7 to avoid getting `No handlers could be found
        for logger "xxx"` http://bugs.python.org/issue16539
        """
        def handle(self, record):
            pass
        def emit(self, record):
            pass
        def createLock(self):
            self.lock = None
else:
    from logging import NullHandler

# Silence this library's logger unless the application configures one.
LOG.addHandler(NullHandler())

# PEP 396 style version marker
try:
    __version__ = pkg_resources.get_distribution(u'eudat.accounting.client').version
except Exception:
    # Narrowed from a bare `except:` (which also swallowed SystemExit and
    # KeyboardInterrupt); DistributionNotFound is the expected failure here.
    LOG.warning("Could not get the package version from pkg_resources")
    __version__ = 'unknown'

# FIXME: This is just for checking doctests setup. You may remove this function.
# See tests/test_doctests.py from this distro root
def identity(obj):
    """Returns the ``obj`` parameter itself

    :param obj: The parameter to be returned
    :return: ``obj`` itself
Ejemplo n.º 34
0
import logging
from logging import NullHandler
from snueue.config.base import *

# Development configuration: extends the shared base settings imported above.
DEBUG = True
ASSETS_DEBUG = True

COMPASS_CONFIG = {'sourcemap': 'true'}

SECRET_KEY = get_secret('SNUEUE_SECRET_KEY')

# Optional host to be passed into app.run. Useful for running in docker
# so app can run with the host as 0.0.0.0 and be accessible from the host.
# Defaults to 127.0.0.1 (localhost)
HOST = get_secret('SNUEUE_HOST', False)

REDIS_HOST = get_secret('SNUEUE_DATABASE_HOST', False) or "localhost"

# Reddit API settings
REDDIT_USER_AGENT = get_secret('SNUEUE_REDDIT_USER_AGENT')
REDDIT_CLIENT_ID = get_secret('SNUEUE_REDDIT_CLIENT_ID')
REDDIT_CLIENT_SECRET = get_secret('SNUEUE_REDDIT_CLIENT_SECRET')

# Logging
# Use a no-op logger for development since logs are printed by the
# debug server automatically.
LOGGING_LEVEL = logging.DEBUG
null_handler = NullHandler()
null_handler.setLevel(LOGGING_LEVEL)
LOGGING_HANDLER = null_handler
Ejemplo n.º 35
0
 def __init__(self, collector, level=NOTSET):
     """Initialize the handler and remember *collector* for later use."""
     NullHandler.__init__(self, level)
     self.collector = collector
Ejemplo n.º 36
0
from __future__ import absolute_import, unicode_literals

import logging

from concurrency.config import conf

# Set default logging handler to avoid "No handler found" warnings.
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        """Fallback no-op handler for Python versions before 2.7."""
        def emit(self, record):
            pass

logging.getLogger('concurrency').addHandler(NullHandler())

logger = logging.getLogger(__name__)

# Deliberately empty: this module exports nothing via `from ... import *`.
__all__ = []

def get_version_fieldname(obj):
    """Return the attribute name of *obj*'s concurrency version field."""
    meta = obj._concurrencymeta
    return meta.field.attname


def _set_version(obj, version):
    """
    Set the given version on the passed object

    This function should be used with 'raw' values, any type conversion should be managed in
    VersionField._set_version_value(). This is needed for future enhancement of concurrency.
Ejemplo n.º 37
0
def get_logger(name):
    """Return a logger named under ROOT_LOGGER with a NullHandler attached."""
    child = getLogger(ROOT_LOGGER + name)  # pylint: disable=invalid-name
    child.addHandler(NullHandler())
    return child
Ejemplo n.º 38
0
# Require Paho-MQTT https://pypi.org/project/paho-mqtt/

from argparse import ArgumentParser, FileType
from configparser import ConfigParser
from json import dumps as jdumps
from logging import (DEBUG, ERROR, Formatter, Logger, NullHandler,
                     StreamHandler, getLogger)
from pprint import pprint
from socket import gethostname
from struct import unpack as sunpack, calcsize as scalc
from sys import exit as sysexit, modules, stderr
from time import sleep
from traceback import print_exc
from typing import Iterable, Optional, Set, Union
# Workaround to prevent Bleson from setting up a logging basicConfig.
Logger.root.addHandler(NullHandler())
from bleson import get_provider, logger, Observer
from paho.mqtt.client import Client, connack_string

# Spellings accepted for boolean configuration values.
# NOTE(review): presumably compared case-insensitively — the comparison
# code is outside this excerpt; confirm against the caller.
TRUE_BOOLEANS = ['on', 'high', 'true', 'enable', 'enabled', 'yes', '1']
"""String values evaluated as true boolean values"""

FALSE_BOOLEANS = ['off', 'low', 'false', 'disable', 'disabled', 'no', '0']
"""String values evaluated as false boolean values"""


class MqttClient(Client):
    def __init__(self, *args, **kwargs):
        self._log = getLogger('xitherm.mqtt')
        super().__init__(*args, **kwargs)
Ejemplo n.º 39
0
# -*- coding: UTF-8 -*-

from socket import SHUT_RDWR, error as SOCKET_ERROR, timeout as SOCKET_TIMEOUT
from struct import pack, unpack
from logging import getLogger, NullHandler
import select

from librouteros.exceptions import ConnectionError, FatalError

LOGGER = getLogger('librouteros')
LOGGER.addHandler(NullHandler())


class Encoder:
    def encodeSentence(self, *words):
        """
        Encode given sentence in API format.

        :param words: Words to encode.
        :returns: Encoded sentence, terminated by the EOS byte.
        """
        body = b''.join(self.encodeWord(word) for word in words)
        # Every sentence ends with an EOS (end of sentence) zero byte.
        return body + b'\x00'

    def encodeWord(self, word):
        """
        Encode word in API format.
Ejemplo n.º 40
0
    def init_logging(self,
                     console_logging=False,
                     file_logging=False,
                     debug_logging=False,
                     database_logging=False):
        """
        Initialize logging

        :param console_logging: True if logging to console
        :param file_logging: True if logging to file
        :param debug_logging: True if debug logging is enabled
        :param database_logging: True if logging database access
        """
        # Fall back to the default log file under settings.LOG_DIR when no
        # path was configured before this call.
        self.log_file = self.log_file or os.path.join(settings.LOG_DIR,
                                                      'sickchill.log')

        # Keep the module-level copy in sync for code that reads it directly.
        global log_file
        log_file = self.log_file

        self.debug_logging = debug_logging
        self.console_logging = console_logging
        self.file_logging = file_logging
        self.database_logging = database_logging

        logging.addLevelName(DB, 'DB')  # add a new logging level DB
        logging.getLogger().addHandler(NullHandler())  # nullify root logger

        # set custom root logger
        # NOTE(review): rebinding .root/.parent on each managed logger routes
        # their records through self.logger — unusual use of logging internals;
        # confirm this is intended before changing.
        for logger in self.loggers:
            if logger is not self.logger:
                logger.root = self.logger
                logger.parent = self.logger

        # DB is the most verbose level, then DEBUG, then the INFO default.
        log_level = DB if self.database_logging else DEBUG if self.debug_logging else INFO

        # set minimum logging level allowed for loggers
        for logger in self.loggers:
            if logger.name in ('subliminal', 'tornado.access',
                               'tornado.general',
                               'imdbpy.parser.http.piculet'):
                # Noisy third-party loggers are clamped to CRITICAL only.
                logger.setLevel('CRITICAL')
            else:
                logger.setLevel(log_level)

        log_format = '{asctime} {levelname} :: {threadName} :: {message}'
        # console log handler
        if self.console_logging:
            console = logging.StreamHandler()
            console.setFormatter(DispatchFormatter(log_format, dateTimeFormat))
            console.setLevel(log_level)

            for logger in self.loggers:
                logger.addHandler(console)

        # rotating log file handler
        if self.file_logging:
            rfh = logging.handlers.RotatingFileHandler(
                self.log_file,
                maxBytes=int(settings.LOG_SIZE * 1048576),
                backupCount=settings.LOG_NR)
            rfh.setFormatter(DispatchFormatter(log_format, dateTimeFormat))
            rfh.setLevel(log_level)

            for logger in self.loggers:
                logger.addHandler(rfh)
Ejemplo n.º 41
0
try:
    from ui_js_plot_dialog import Ui_JSPlotDialog
except ImportError:
    from .ui_js_plot_dialog import Ui_JSPlotDialog

import cv2

# Log file setting.
# import logging
# logging.basicConfig(filename='MainWindow.log', level=logging.DEBUG)

# Log output setting.
# If handler = StreamHandler(), log will output into StandardOutput.
from logging import getLogger, NullHandler, StreamHandler, DEBUG
logger = getLogger(__name__)
# Change the literal condition below to False to route logs to stdout.
handler = NullHandler() if True else StreamHandler()
handler.setLevel(DEBUG)
logger.setLevel(DEBUG)
logger.addHandler(handler)

# determine if application is a script file or frozen exe
if getattr(sys, 'frozen', False):
    # sys._MEIPASS — presumably a PyInstaller one-file bundle; confirm packaging tool.
    current_dir_path = sys._MEIPASS
    print(current_dir_path)  # NOTE(review): leftover debug print — consider removing
    if os.name == 'nt':
        import win32api

        # Ensure bundled DLLs are found when running the frozen exe on Windows.
        win32api.SetDllDirectory(sys._MEIPASS)
elif __file__:
    # Running as a normal script: resolve relative paths from the CWD.
    current_dir_path = os.getcwd()
Ejemplo n.º 42
0
import time
import ConfigParser

from six import text_type, string_types

# Disable logging messages triggered by pSphere/suds.
try:
    from logging import NullHandler
except ImportError:
    from logging import Handler

    class NullHandler(Handler):
        """Fallback no-op handler for Python versions without logging.NullHandler."""
        def emit(self, record):
            pass

logging.getLogger('psphere').addHandler(NullHandler())
logging.getLogger('suds').addHandler(NullHandler())

from psphere.client import Client
from psphere.errors import ObjectNotFoundError
from psphere.managedobjects import HostSystem, VirtualMachine, ManagedObject, Network, ClusterComputeResource
from suds.sudsobject import Object as SudsObject


class VMwareInventory(object):

    def __init__(self, guests_only=None):
        self.config = ConfigParser.SafeConfigParser()
        if os.environ.get('VMWARE_INI', ''):
            config_files = [os.environ['VMWARE_INI']]
        else: