Exemplo n.º 1
0
 def Enable(self):
     """Enable remote debugging via guppy's Heapy Remote manhole.

     Raises:
         Bcfg2.Server.Plugin.PluginInitError: if the Heapy remote
             context cannot be created.
     """
     try:
         Remote.on()
     # Catch Exception instead of a bare except so SystemExit and
     # KeyboardInterrupt are not swallowed here.
     except Exception:
         self.logger.error("Failed to create Heapy context")
         raise Bcfg2.Server.Plugin.PluginInitError
Exemplo n.º 2
0
 def Disable(self):
     """Disable remote debugging via guppy's Heapy Remote manhole.

     Raises:
         Bcfg2.Server.Plugin.PluginInitError: if the Heapy remote
             context cannot be shut down.
     """
     try:
         Remote.off()
     # Catch Exception instead of a bare except so SystemExit and
     # KeyboardInterrupt are not swallowed here.
     except Exception:
         self.logger.error("Failed to disable Heapy")
         raise Bcfg2.Server.Plugin.PluginInitError
Exemplo n.º 3
0
 def Disable(self):
     """Disable remote debugging via guppy's Heapy Remote manhole.

     Raises:
         Bcfg2.Server.Plugin.PluginInitError: if the Heapy remote
             context cannot be shut down.
     """
     try:
         Remote.off()
     # Catch Exception instead of a bare except so SystemExit and
     # KeyboardInterrupt are not swallowed here.
     except Exception:
         self.logger.error("Failed to disable Heapy")
         raise Bcfg2.Server.Plugin.PluginInitError
Exemplo n.º 4
0
 def Enable(self):
     """Enable remote debugging via guppy's Heapy Remote manhole.

     Raises:
         Bcfg2.Server.Plugin.PluginInitError: if the Heapy remote
             context cannot be created.
     """
     try:
         Remote.on()
     # Catch Exception instead of a bare except so SystemExit and
     # KeyboardInterrupt are not swallowed here.
     except Exception:
         self.logger.error("Failed to create Heapy context")
         raise Bcfg2.Server.Plugin.PluginInitError
Exemplo n.º 5
0
def monitor():
    """monitor() [0]

Start an interactive remote monitor.

This can be used to get information about the state, in
particular the memory usage, of separately running Python
processes.

References
    [0] heapy_Use.html#heapykinds.Use.monitor"""
    # Make sure this process' own Remote manhole is off before monitoring
    # other processes, then hand control to the interactive Monitor loop.
    from guppy.heapy import Remote
    Remote.off()
    session = Monitor()
    session.run()
Exemplo n.º 6
0
def monitor():
    """monitor() [0]

Start an interactive remote monitor.

This can be used to get information about the state, in
particular the memory usage, of separately running Python
processes. 

References
    [0] heapy_Use.html#heapykinds.Use.monitor"""
    # Disable this process' own Remote manhole before monitoring others,
    # then run the interactive Monitor command loop until the user exits.
    from guppy.heapy import Remote
    Remote.off()
    m = Monitor()
    m.run()
Exemplo n.º 7
0
def main():
	"""CLI entry point for the IRC helper bot.

	Parses command-line options, layers YAML configuration files over the
	bundled defaults, configures twisted/stdlib logging (adding a NOISE
	level below DEBUG), loads only the relay entry points referenced by
	configured routes, wires relays/channels/routes into the routing
	interface, optionally listens as an IRC server, then runs the twisted
	reactor (blocks until shutdown).
	"""
	import argparse
	parser = argparse.ArgumentParser(
		description='Start the IRC helper bot.')

	parser.add_argument('-e', '--relay-enable',
		action='append', metavar='relay', default=list(),
		help='Enable only the specified relays, can be specified multiple times.')
	parser.add_argument('-d', '--relay-disable',
		action='append', metavar='relay', default=list(),
		help='Explicitly disable specified relays,'
			' can be specified multiple times. Overrides --relay-enable.')

	parser.add_argument('-c', '--config',
		action='append', metavar='path', default=list(),
		help='Configuration files to process.'
			' Can be specified more than once.'
			' Values from the latter ones override values in the former.'
			' Available CLI options override the values in any config.')

	parser.add_argument('-n', '--dry-run', action='store_true',
		help='Connect to IRC, but do not communicate there,'
			' dumping lines-to-be-sent to the log instead.')
	parser.add_argument('--fatal-errors', action='store_true',
		help='Do not try to ignore entry_point'
			' init errors, bailing out with traceback instead.')
	parser.add_argument('--debug',
		action='store_true', help='Verbose operation mode.')
	parser.add_argument('--debug-memleaks', action='store_true',
		help='Import guppy and enable its manhole to debug memleaks (requires guppy module).')
	parser.add_argument('--noise',
		action='store_true', help='Even more verbose mode than --debug.')
	optz = parser.parse_args()

	## Read configuration files
	# Base config is the YAML file sitting next to this script; -c files
	# are layered on top in the order given.
	cfg = lya.AttrDict.from_yaml('{}.yaml'.format(splitext(realpath(__file__))[0]))
	for k in optz.config: cfg.update_yaml(k)

	## CLI overrides
	if optz.dry_run: cfg.debug.dry_run = optz.dry_run

	## Logging
	import logging
	# Custom level below DEBUG for very chatty protocol dumps.
	logging.NOISE = logging.DEBUG - 1
	logging.addLevelName(logging.NOISE, 'NOISE')
	try: from twisted.python.logger._stdlib import fromStdlibLogLevelMapping
	except ImportError: pass # newer twisted versions only
	else: fromStdlibLogLevelMapping[logging.NOISE] = logging.NOISE
	if optz.noise: lvl = logging.NOISE
	elif optz.debug: lvl = logging.DEBUG
	else: lvl = logging.WARNING
	lya.configure_logging(cfg.logging, lvl)
	log.PythonLoggingObserver().start()

	# Attach per-level shortcut helpers (log.debug, log.warn, ...) to
	# twisted's log module, routing through log.msg with a logLevel.
	for lvl in 'noise', 'debug', 'info', ('warning', 'warn'), 'error', ('critical', 'fatal'):
		lvl, func = lvl if isinstance(lvl, tuple) else (lvl, lvl)
		# NOTE(review): guards on "lvl" while setattr below binds "func" --
		# for the tuple entries these names differ; confirm that is intended.
		assert not hasattr(log, lvl)
		setattr(log, func, ft.partial( log.msg,
			logLevel=logging.getLevelName(lvl.upper()) ))

	# Manholes
	if optz.debug_memleaks:
		import guppy
		from guppy.heapy import Remote
		Remote.on()

	## Fake "xattr" module, if requested
	# Emulates extended attributes via a shelve db keyed on "<path>\0<attr>".
	if cfg.core.xattr_emulation:
		import shelve
		xattr_db = shelve.open(cfg.core.xattr_emulation, 'c')
		class xattr_path(object):
			def __init__(self, base):
				assert isinstance(base, str)
				self.base = base
			def key(self, k): return '{}\0{}'.format(self.base, k)
			def __setitem__(self, k, v): xattr_db[self.key(k)] = v
			def __getitem__(self, k): return xattr_db[self.key(k)]
			def __del__(self): xattr_db.sync()
		class xattr_module(object): xattr = xattr_path
		sys.modules['xattr'] = xattr_module

	## Actual init
	# Merge entry points configuration with CLI opts
	conf = ep_config( cfg,
		[ dict(ep='relay_defaults'),
			dict( ep='modules',
				enabled=optz.relay_enable, disabled=optz.relay_disable ) ] )
	(conf_base, conf), (conf_def_base, conf_def) =\
		op.itemgetter('modules', 'relay_defaults')(conf)
	for subconf in conf.viewvalues(): subconf.rebase(conf_base)
	# Partition module sections by their "type" key.
	relays, channels, routes = (
		dict( (name, subconf) for name,subconf in conf.viewitems()
		if name[0] != '_' and subconf.get('type') == subtype )
		for subtype in ['relay', 'channel', 'route'] )

	# Init interface
	interface = routing.BCInterface(
		irc_enc=cfg.core.encoding,
		chan_prefix=cfg.core.channel_prefix,
		max_line_length=cfg.core.max_line_length,
		dry_run=cfg.debug.dry_run )

	# Find out which relay entry_points are actually used
	route_mods = set(it.chain.from_iterable(
		it.chain.from_iterable(
			(mod if isinstance(mod, list) else [mod])
			for mod in ((route.get(k) or list()) for k in ['src', 'dst', 'pipe']) )
		for route in routes.viewvalues() ))
	# Resolve config-section aliases to their underlying entry_point names.
	for name in list(route_mods):
		try:
			name_ep = relays[name].name
			if name == name_ep: continue
		except KeyError: pass
		else:
			route_mods.add(name_ep)
			route_mods.remove(name)

	# Init relays
	relays_obj = dict()
	for ep in get_relay_list():
		if ep.name[0] == '_':
			log.debug( 'Skipping entry_point with name'
				' prefixed by underscore: {}'.format(ep.name) )
			continue
		if ep.name not in route_mods:
			log.debug(( 'Skipping loading relay entry_point {}'
				' because its not used in any of the routes' ).format(ep.name))
			continue
		ep_relays = list( (name, subconf)
			for name, subconf in relays.viewitems()
			if subconf.get('name', name) == ep.name )
		if not ep_relays: ep_relays = [(ep.name, conf_base.clone())]
		for name, subconf in ep_relays:
			# Fix: always bind "obj" -- previously a relay disabled in its
			# config (enabled=False) hit a NameError on the "if obj" check below.
			obj = None
			try: relay_defaults = conf_def[ep.name]
			except KeyError: pass
			else:
				subconf.rebase(relay_defaults)
				subconf.rebase(conf_def_base)
			if subconf.get('enabled', True):
				log.debug('Loading relay: {} ({})'.format(name, ep.name))
				try:
					obj = ep.load().relay(subconf, interface=interface)
					if not obj: raise AssertionError('Empty object')
				except Exception as err:
					if optz.fatal_errors: raise
					log.error('Failed to load/init relay {}: {} {}'.format(ep.name, type(err), err))
					obj, subconf.enabled = None, False
			if obj and subconf.get('enabled', True): relays_obj[name] = obj
			else:
				log.debug(( 'Entry point object {!r} (name:'
					' {}) was disabled after init' ).format(obj, ep.name) )
	for name in set(relays).difference(relays_obj):
		log.debug(( 'Unused relay configuration - {}: no such'
			' entry point - {}' ).format(name, relays[name].get('name', name)))
	if not relays_obj:
		log.fatal('No relay objects were properly enabled/loaded, bailing out')
		sys.exit(1)
	log.debug('Enabled relays: {}'.format(relays_obj))

	# Relays-client interface
	interface.update(relays_obj, channels, routes)

	# Server
	if cfg.core.connection.server.endpoint:
		password = cfg.core.connection.get('password')
		if not password:
			# Generate a random one-shot password when none is configured.
			from hashlib import sha1
			password = cfg.core.connection.password =\
				sha1(open('/dev/urandom', 'rb').read(120/8)).hexdigest()
		factory = irc.BCServerFactory(
			cfg.core.connection.server,
			*(chan.get('name', name) for name,chan in channels.viewitems()),
			**{cfg.core.connection.nickname: password} )
		endpoints\
			.serverFromString(reactor, cfg.core.connection.server.endpoint)\
			.listen(factory)

	# Client with proper endpoints + reconnection
	# See: http://twistedmatrix.com/trac/ticket/4472 + 4700 + 4735
	ep = endpoints.clientFromString(reactor, cfg.core.connection.endpoint)
	irc.BCClientFactory(cfg.core, interface, ep).connect()

	log.debug('Starting event loop')
	reactor.run()
Exemplo n.º 8
0
def main():
	"""CLI entry point: collect and dispatch metrics to destinations.

	Parses CLI options, layers YAML configuration files, configures
	logging, applies CLI overrides (destination/interval/dry-run/xattr),
	loads collector/processor/sink entry points honoring the CLI
	enable/disable lists, then starts the configured main loop (blocks).
	"""
	import argparse
	parser = argparse.ArgumentParser(
		description='Collect and dispatch various metrics to destinations.')
	parser.add_argument('-t', '--destination', metavar='host[:port]',
		help='host[:port] (default port: 2003, can be overridden'
			' via config file) of sink destination endpoint (e.g. carbon'
			' linereceiver tcp port, by default).')
	parser.add_argument('-i', '--interval', type=int, metavar='seconds',
		help='Interval between collecting and sending the datapoints.')

	parser.add_argument('-e', '--collector-enable',
		action='append', metavar='collector', default=list(),
		help='Enable only the specified metric collectors,'
				' can be specified multiple times.')
	parser.add_argument('-d', '--collector-disable',
		action='append', metavar='collector', default=list(),
		help='Explicitly disable specified metric collectors,'
			' can be specified multiple times. Overrides --collector-enable.')

	parser.add_argument('-s', '--sink-enable',
		action='append', metavar='sink', default=list(),
		help='Enable only the specified datapoint sinks,'
				' can be specified multiple times.')
	parser.add_argument('-x', '--sink-disable',
		action='append', metavar='sink', default=list(),
		help='Explicitly disable specified datapoint sinks,'
			' can be specified multiple times. Overrides --sink-enable.')

	parser.add_argument('-p', '--processor-enable',
		action='append', metavar='processor', default=list(),
		help='Enable only the specified datapoint processors,'
				' can be specified multiple times.')
	parser.add_argument('-z', '--processor-disable',
		action='append', metavar='processor', default=list(),
		help='Explicitly disable specified datapoint processors,'
			' can be specified multiple times. Overrides --processor-enable.')

	parser.add_argument('-c', '--config',
		action='append', metavar='path', default=list(),
		help='Configuration files to process.'
			' Can be specified more than once.'
			' Values from the latter ones override values in the former.'
			' Available CLI options override the values in any config.')

	parser.add_argument('-a', '--xattr-emulation', metavar='db-path',
		help='Emulate filesystem extended attributes (used in'
			' some collectors like sysstat or cron_log), storing per-path'
			' data in a simple shelve db.')
	parser.add_argument('-n', '--dry-run',
		action='store_true', help='Do not actually send data.')
	parser.add_argument('--debug-memleaks', action='store_true',
		help='Import guppy and enable its manhole to debug memleaks (requires guppy module).')
	parser.add_argument('--debug',
		action='store_true', help='Verbose operation mode.')
	optz = parser.parse_args()

	# Read configuration files
	# Base config is the YAML file next to this script; -c files layer over it.
	cfg = AttrDict.from_yaml('{}.yaml'.format(
		os.path.splitext(os.path.realpath(__file__))[0] ))
	for k in optz.config: cfg.update_yaml(k)

	# Logging
	import logging
	configure_logging( cfg.logging,
		logging.DEBUG if optz.debug else logging.WARNING )
	if not cfg.logging.tracebacks:
		# Downgrade logger.exception() to plain error() to suppress tracebacks.
		class NoTBLogger(logging.Logger):
			def exception(self, *argz, **kwz): self.error(*argz, **kwz)
		logging.setLoggerClass(NoTBLogger)
	log = logging.getLogger(__name__)

	# Manholes
	if optz.debug_memleaks:
		import guppy
		from guppy.heapy import Remote
		Remote.on()

	# Fill "auto-detected" blanks in the configuration, CLI overrides
	try:
		if optz.destination: cfg.sinks._default.host = optz.destination
		# Split "host[:port]" into a (host, port) pair, defaulting the port.
		cfg.sinks._default.host = cfg.sinks._default.host.rsplit(':', 1)
		if len(cfg.sinks._default.host) == 1:
			cfg.sinks._default.host =\
				cfg.sinks._default.host[0], cfg.sinks._default.default_port
		else: cfg.sinks._default.host[1] = int(cfg.sinks._default.host[1])
	except KeyError: pass
	if optz.interval: cfg.loop.interval = optz.interval
	if optz.dry_run: cfg.debug.dry_run = optz.dry_run
	if optz.xattr_emulation: cfg.core.xattr_emulation = optz.xattr_emulation

	# Fake "xattr" module, if requested
	# Emulates extended attributes via a shelve db keyed on "<path>\0<attr>".
	if cfg.core.xattr_emulation:
		import shelve
		xattr_db = shelve.open(cfg.core.xattr_emulation, 'c')
		class xattr_path(object):
			def __init__(self, base):
				assert isinstance(base, str)
				self.base = base
			def key(self, k): return '{}\0{}'.format(self.base, k)
			def __setitem__(self, k, v): xattr_db[self.key(k)] = v
			def __getitem__(self, k): return xattr_db[self.key(k)]
			def __del__(self): xattr_db.sync()
		class xattr_module(object): xattr = xattr_path
		sys.modules['xattr'] = xattr_module

	# Override "enabled" collector/sink parameters, based on CLI
	ep_conf = dict()
	for ep, enabled, disabled in\
			[ ('collectors', optz.collector_enable, optz.collector_disable),
				('processors', optz.processor_enable, optz.processor_disable),
				('sinks', optz.sink_enable, optz.sink_disable) ]:
		conf = cfg[ep]
		conf_base = conf.pop('_default')
		if 'debug' not in conf_base: conf_base['debug'] = cfg.debug
		ep_conf[ep] = conf_base, conf, OrderedDict(), enabled, disabled

	# Init global cfg for collectors/sinks' usage
	from graphite_metrics import collectors, sinks, loops
	collectors.cfg = sinks.cfg = loops.cfg = cfg

	# Init pluggable components
	import pkg_resources

	for ep_type in 'collector', 'processor', 'sink':
		ep_key = '{}s'.format(ep_type) # a bit of a hack
		conf_base, conf, objects, enabled, disabled = ep_conf[ep_key]
		ep_dict = dict( (ep.name, ep) for ep in
			pkg_resources.iter_entry_points('graphite_metrics.{}'.format(ep_key)) )
		# Config-declared entry points first (in config order), then the rest.
		eps = OrderedDict(
			(name, (ep_dict.pop(name), subconf or AttrDict()))
			for name, subconf in conf.viewitems() if name in ep_dict )
		eps.update( (name, (module, conf_base))
			for name, module in ep_dict.viewitems() )
		for ep_name, (ep_module, subconf) in eps.viewitems():
			if ep_name[0] == '_':
				log.debug( 'Skipping {} entry point,'
					' prefixed by underscore: {}'.format(ep_type, ep_name) )
				# Fix: actually skip it -- previously this fell through
				# and loaded underscore-prefixed entry points anyway.
				continue
			subconf.rebase(conf_base) # fill in "_default" collector parameters
			if enabled:
				if ep_name in enabled: subconf['enabled'] = True
				else: subconf['enabled'] = False
			if disabled and ep_name in disabled: subconf['enabled'] = False
			if subconf.get('enabled', True):
				log.debug('Loading {}: {}'.format(ep_type, ep_name))
				try: obj = getattr(ep_module.load(), ep_type)(subconf)
				except Exception as err:
					log.exception('Failed to load/init {} ({}): {}'.format(ep_type, ep_name, err))
					subconf.enabled = False
					obj = None
				if subconf.get('enabled', True): objects[ep_name] = obj
				else:
					log.debug(( '{} {} (entry point: {})'
						' was disabled after init' ).format(ep_type.title(), obj, ep_name))
		# Processors are optional; collectors and sinks are mandatory.
		if ep_type != 'processor' and not objects:
			log.fatal('No {}s were properly enabled/loaded, bailing out'.format(ep_type))
			sys.exit(1)
		log.debug('{}: {}'.format(ep_key.title(), objects))

	loop = dict( (ep.name, ep) for ep in
		pkg_resources.iter_entry_points('graphite_metrics.loops') )
	conf = AttrDict(**cfg.loop)
	if 'debug' not in conf: conf.debug = cfg.debug
	loop = loop[cfg.loop.name].load().loop(conf)

	collectors, processors, sinks = it.imap( op.itemgetter(2),
		op.itemgetter('collectors', 'processors', 'sinks')(ep_conf) )
	log.debug(
		'Starting main loop: {} ({} collectors, {} processors, {} sinks)'\
		.format(loop, len(collectors), len(processors), len(sinks)) )
	loop.start(collectors, processors, sinks)
Exemplo n.º 9
0
# Enable guppy's Heapy remote-monitor manhole in this process, so a
# separate "heapy monitor" session can connect and inspect its memory use.
import guppy
from guppy.heapy import Remote
Remote.on()
Exemplo n.º 10
0
def main(argv=None):
    """CLI entry point for the discoverable network manager WebUI.

    Forks a watchdog process that checks the twisted reactor stays
    responsive, configures logging and optional debug manholes, then
    runs the WebUI HTTP site and the NetworkManager interface as
    services under a twisted MultiService until the reactor exits.

    Returns the exit code accumulated in nm_wifi_webui.utils.
    """
    import argparse
    parser = argparse.ArgumentParser(
        description='Discoverable network manager WebUI.')

    parser.add_argument('-p',
                        '--httpd-port',
                        metavar='port',
                        type=int,
                        default=8080,
                        help='Port to bind WebUI to (default: %(default)s).')
    parser.add_argument(
        '--httpd-static',
        metavar='path',
        default=join(dirname(__file__), 'static'),
        help='Path to static web assets (default: %(default)s).')
    parser.add_argument('--httpd-templates',
                        metavar='path',
                        default=join(dirname(__file__), 'templates'),
                        help='Path to templates (default: %(default)s).')

    parser.add_argument(
        '--secrets-file',
        metavar='path',
        default=join(dirname(__file__), 'secrets.bencode'),
        help=
        'Path to file where all secrets will be stored (default: %(default)s).')

    parser.add_argument(
        '-w',
        '--watchdog-ping-interval',
        metavar='seconds',
        type=float,
        default=10,
        help=
        'Interval between checks if main process is responsive (default: %(default)s).'
    )

    # XXX: add manhole
    parser.add_argument(
        '-l',
        '--only-logger',
        metavar='logger_name',
        help='Only display logging stream from specified'
        ' logger name (example: nm.core) and errors from twisted logger.')
    parser.add_argument(
        '--debug-memleaks',
        action='store_true',
        help=
        'Import guppy and enable its manhole to debug memleaks (requires guppy module).'
    )
    parser.add_argument(
        '--debug-deferreds',
        action='store_true',
        help=
        'Set debug mode for deferred objects to produce long tracebacks for unhandled errbacks.'
    )
    parser.add_argument('--debug',
                        action='store_true',
                        help='Verbose operation mode.')
    parser.add_argument(
        '--noise',
        action='store_true',
        help='In addition to --debug also dump e.g. all sent/received messages.'
    )
    opts = parser.parse_args(argv or sys.argv[1:])

    # Forked watchdog pid makes sure that twisted reactor isn't stuck (on e.g. blocking call)
    pid = os.fork()
    if not pid:
        # Child: run the watchdog loop and never return to main().
        watchdog(opts.watchdog_ping_interval)
        sys.exit(0)
    watchdog_reply_setup(pid)

    from nm_wifi_webui.webui import WebUI
    from nm_wifi_webui.nm import NMInterface
    from nm_wifi_webui import utils

    from twisted.internet import reactor, defer
    from twisted.web import server
    from twisted.application import strports, service

    import logging

    log = dict()
    if opts.only_logger:
        log['one_logger'] = opts.only_logger
    utils.init_logging(debug=opts.debug, noise=opts.noise, **log)
    log = logging.getLogger('interface.core')

    if opts.debug_memleaks:
        import guppy
        from guppy.heapy import Remote
        Remote.on()
    if opts.debug_deferreds:
        defer.Deferred.debug = True

    app = service.MultiService()
    webui = WebUI(static_path=opts.httpd_static,
                  templates_path=opts.httpd_templates)
    # Serve the WebUI resource at the site root as well.
    webui.putChild('', webui)

    site = server.Site(webui)
    site.noisy = False
    site.displayTracebacks = False
    strports.service('tcp:{}'.format(opts.httpd_port),
                     site).setServiceParent(app)

    nm = NMInterface(opts.secrets_file, webui)
    nm.setServiceParent(app)

    app.startService()
    reactor.addSystemEventTrigger('before', 'shutdown', app.stopService)

    log.debug('Starting...')
    reactor.run()
    log.debug('Finished (exit code: %s)', utils.exit_code)

    return utils.exit_code
Exemplo n.º 11
0
 def Disable():
     """Disable remote debugging."""
     # Turn off guppy's Heapy remote-monitor manhole for this process.
     Remote.off()
Exemplo n.º 12
0
 def Enable():
     """Enable remote debugging."""
     # Turn on guppy's Heapy remote-monitor manhole for this process.
     Remote.on()
def main(argv=None):
	"""CLI entry point for the discoverable network manager WebUI.

	Forks a watchdog process that checks the twisted reactor stays
	responsive, configures logging and optional debug manholes, then
	runs the WebUI HTTP site and the NetworkManager interface as
	services under a twisted MultiService until the reactor exits.

	Returns the exit code accumulated in nm_wifi_webui.utils.
	"""
	import argparse
	parser = argparse.ArgumentParser(
		description='Discoverable network manager WebUI.')

	parser.add_argument('-p', '--httpd-port',
		metavar='port', type=int, default=8080,
		help='Port to bind WebUI to (default: %(default)s).')
	parser.add_argument('--httpd-static',
		metavar='path', default=join(dirname(__file__), 'static'),
		help='Path to static web assets (default: %(default)s).')
	parser.add_argument('--httpd-templates',
		metavar='path', default=join(dirname(__file__), 'templates'),
		help='Path to templates (default: %(default)s).')

	parser.add_argument('--secrets-file',
		metavar='path', default=join(dirname(__file__), 'secrets.bencode'),
		help='Path to file where all secrets will be stored (default: %(default)s).')

	parser.add_argument('-w', '--watchdog-ping-interval',
		metavar='seconds', type=float, default=10,
		help='Interval between checks if main process is responsive (default: %(default)s).')

	# XXX: add manhole
	parser.add_argument('-l', '--only-logger', metavar='logger_name',
		help='Only display logging stream from specified'
			' logger name (example: nm.core) and errors from twisted logger.')
	parser.add_argument('--debug-memleaks', action='store_true',
		help='Import guppy and enable its manhole to debug memleaks (requires guppy module).')
	parser.add_argument('--debug-deferreds', action='store_true',
		help='Set debug mode for deferred objects to produce long tracebacks for unhandled errbacks.')
	parser.add_argument('--debug', action='store_true', help='Verbose operation mode.')
	parser.add_argument('--noise', action='store_true',
		help='In addition to --debug also dump e.g. all sent/received messages.')
	opts = parser.parse_args(argv or sys.argv[1:])

	# Forked watchdog pid makes sure that twisted reactor isn't stuck (on e.g. blocking call)
	pid = os.fork()
	if not pid:
		# Child: run the watchdog loop and never return to main().
		watchdog(opts.watchdog_ping_interval)
		sys.exit(0)
	watchdog_reply_setup(pid)

	from nm_wifi_webui.webui import WebUI
	from nm_wifi_webui.nm import NMInterface
	from nm_wifi_webui import utils

	from twisted.internet import reactor, defer
	# Dropped unused imports: twisted.web.resource, twisted.python.filepath.FilePath
	from twisted.web import server
	from twisted.application import strports, service

	import logging

	log = dict()
	if opts.only_logger: log['one_logger'] = opts.only_logger
	utils.init_logging(debug=opts.debug, noise=opts.noise, **log)
	log = logging.getLogger('interface.core')

	if opts.debug_memleaks:
		import guppy
		from guppy.heapy import Remote
		Remote.on()
	if opts.debug_deferreds: defer.Deferred.debug = True

	app = service.MultiService()
	webui = WebUI(static_path=opts.httpd_static, templates_path=opts.httpd_templates)
	# Serve the WebUI resource at the site root as well.
	webui.putChild('', webui)

	site = server.Site(webui)
	site.noisy = False
	site.displayTracebacks = False
	strports.service('tcp:{}'.format(opts.httpd_port), site).setServiceParent(app)

	nm = NMInterface(opts.secrets_file, webui)
	nm.setServiceParent(app)

	app.startService()
	reactor.addSystemEventTrigger('before', 'shutdown', app.stopService)

	log.debug('Starting...')
	reactor.run()
	log.debug('Finished (exit code: %s)', utils.exit_code)

	return utils.exit_code