def init_logging(context):
    """Begin global Twisted logging to stdout and to a per-session log file,
    then emit an ASCII-art banner.

    Args:
        context: dict carrying a "session_files" object whose ``session_dir``
            is where singt.log is created.
    """
    session_files = context["session_files"]

    # Open the session log file; the observer keeps it open for the run.
    log_path = session_files.session_dir / "singt.log"
    log_file = open(log_path, 'w')

    # Both observers pass everything up to debug (the most verbose level).
    debug_only = [LogLevelFilterPredicate(LogLevel.debug)]
    targets = [
        FilteringLogObserver(textFileLogObserver(sys.stdout),
                             predicates=debug_only),
        FilteringLogObserver(textFileLogObserver(log_file),
                             predicates=debug_only),
    ]

    # Direct the Twisted Logger to log to both of our observers.
    globalLogBeginner.beginLoggingTo(targets)

    # ASCII-art title at the top of the log.
    title = art.text2art("Singt Client")
    log.info("\n" + title)
def start(self, console, logfile):
    """Configure and start logging based on user preferences

    Args:
        console (bool): Console logging enabled
        logfile (str): Logfile path
    """
    # Log to console option.
    if console:
        globalLogBeginner.beginLoggingTo(
            [textFileLogObserver(sys.stdout)],
        )
        return

    # Check the file is valid and can be opened in append mode.
    if os.path.exists(logfile) and not os.path.isfile(logfile):
        # Fixed: Python 2 `print` statements are syntax errors on Python 3;
        # also prefer sys.exit over the site-provided exit() builtin.
        print("Logfile %s is not a valid file: exiting." % logfile)
        sys.exit(1)
    try:
        # The handle stays open deliberately: the observer owns it for the
        # lifetime of the process.
        f = open(logfile, 'a')
    except IOError:
        print("Can't open logfile %s: exiting." % logfile)
        sys.exit(1)

    # Begin logging to the file.
    globalLogBeginner.beginLoggingTo([
        textFileLogObserver(f),
    ], redirectStandardIO=False)
def do_rotate_logs(basefile, type):
    """Rotate ``basefile`` (gzip-compressing older generations, keeping up to
    20 of them) and re-attach the matching global log observer.

    Args:
        basefile: path of the live log file to rotate.
        type: "json" or "text" -- selects which observer to rebuild.
    """
    global observers  # module-level registry of the active log observers
    if os.path.exists(basefile):
        # Only rotate once the live file exceeds ~1 KB.
        if os.path.getsize(basefile) > 1000:
            # Shift generations downward: .19.gz -> .20.gz, ..., .2.gz -> .3.gz.
            for c in range(19, 0, -1):
                filename_cur = f"{basefile}.1"
                if c == 1 and os.path.exists(filename_cur):
                    # Compress the newest plain generation before shifting it.
                    with open(filename_cur) as src, gzip.open(
                            f"{filename_cur}.gz", "wb") as dst:
                        dst.writelines(src)
                    os.remove(filename_cur)
                filename_cur = f"{basefile}.{c}.gz"
                filename_next = f"{basefile}.{c+1}.gz"
                if os.path.exists(filename_cur):
                    os.rename(filename_cur, filename_next)
            # The live file becomes generation .1.
            os.rename(basefile, f"{basefile}.1")
            # Re-attach observers so they write to a fresh file handle.
            # NOTE(review): nesting reconstructed from a whitespace-mangled
            # source -- confirm this block belongs inside the size check.
            if type == "json":
                globalLogPublisher.removeObserver(observers["json"])
                observers["json"] = jsonFileLogObserver(
                    io.open("usr/log/yombo.json", "a"))
                globalLogPublisher.addObserver(observers["json"])
                # NOTE(review): this attaches a *second* JSON observer on
                # `basefile` as well -- confirm both sinks are intended.
                globalLogPublisher.addObserver(
                    jsonFileLogObserver(io.open(basefile, "a")))
            elif type == "text":
                observers["text"] = textFileLogObserver(
                    io.open("usr/log/yombo.text", "a"))
                globalLogPublisher.addObserver(observers["text"])
                # NOTE(review): removeObserver() is handed a brand-new
                # observer, so it cannot match an attached one (and
                # textFileLogObserver() without an outFile looks wrong) --
                # verify against the twisted.logger API.
                globalLogPublisher.removeObserver(textFileLogObserver())
                globalLogPublisher.addObserver(
                    textFileLogObserver(io.open(basefile, "a")))
def __init__(self, config):
    """Set up the game server: client registry, logging, game state and the
    main game loop.

    Args:
        config: passed-in game configuration object.
    """
    # Dictionary to hold active clients.
    self.clients = {}

    # Set up logging.
    # TODO: Use config parameters here.
    self.log = Logger()

    # Logging target.
    log_observer = textFileLogObserver(sys.stdout)

    # Filter out levels to the specified severity.
    logging_level_predicate = [LogLevelFilterPredicate(LogLevel.debug)]

    # Set up an Observer to actually perform the filtering.
    # Fixed: reuse the observer built above instead of constructing a second,
    # identical textFileLogObserver (log_observer was created but never used).
    log_filter = FilteringLogObserver(log_observer,
                                      predicates=logging_level_predicate)

    # And register global logging for the filtering observer.
    globalLogBeginner.beginLoggingTo([log_filter])

    # Passed-in game configuration.
    self.configuration = config

    # Game data.
    self.game = game.GameData()

    # Init main game loop; fires every 30 seconds.
    self.game_loop = task.LoopingCall(self.GameLoop)
    self.game_loop.start(30)

    # Holds a (cancelable! - just "self.shutdown.cancel()") callback for
    # shutting down the server as needed.
    self.shutdown = None
def do_rotate_logs(basefile, type):
    """Rotate ``basefile`` (gzip-compressing older generations, keeping up to
    20 of them) and re-attach the matching global log observer.

    Args:
        basefile: path of the live log file to rotate.
        type: 'json' or 'text' -- selects which observer to rebuild.
    """
    global observers  # module-level registry of the active log observers
    if os.path.exists(basefile):
        # Only rotate once the live file exceeds ~1 KB.
        if os.path.getsize(basefile) > 1000:
            # Shift generations downward: .19.gz -> .20.gz, ..., .2.gz -> .3.gz.
            for c in range(19, 0, -1):
                filename_cur = "%s.1" % basefile
                if c == 1 and os.path.exists(filename_cur):
                    # Compress the newest plain generation before shifting it.
                    with open(filename_cur) as src, gzip.open(
                            '%s.gz' % filename_cur, 'wb') as dst:
                        dst.writelines(src)
                    os.remove(filename_cur)
                filename_cur = "%s.%s.gz" % (basefile, c)
                filename_next = "%s.%s.gz" % (basefile, c + 1)
                if os.path.exists(filename_cur):
                    os.rename(filename_cur, filename_next)
            # The live file becomes generation .1.
            os.rename(basefile, "%s.1" % basefile)
            # Re-attach observers so they write to a fresh file handle.
            # NOTE(review): nesting reconstructed from a whitespace-mangled
            # source -- confirm this block belongs inside the size check.
            if type == 'json':
                globalLogPublisher.removeObserver(observers['json'])
                observers['json'] = jsonFileLogObserver(
                    io.open("usr/log/yombo.json", "a"))
                globalLogPublisher.addObserver(observers['json'])
                # NOTE(review): this attaches a *second* JSON observer on
                # `basefile` as well -- confirm both sinks are intended.
                globalLogPublisher.addObserver(
                    jsonFileLogObserver(io.open(basefile, "a")))
            elif type == 'text':
                observers['text'] = textFileLogObserver(
                    io.open("usr/log/yombo.text", "a"))
                globalLogPublisher.addObserver(observers['text'])
                # NOTE(review): removeObserver() is handed a brand-new
                # observer, so it cannot match an attached one (and
                # textFileLogObserver() without an outFile looks wrong) --
                # verify against the twisted.logger API.
                globalLogPublisher.removeObserver(textFileLogObserver())
                globalLogPublisher.addObserver(
                    textFileLogObserver(io.open(basefile, "a")))
def run_client(address, username):
    """Connect the Singt client (TCP + UDP) to ``address`` and run the
    reactor until it stops.

    Args:
        address: server hostname or IP.
        username: name used for the per-user log file and the TCP client.
    """
    banner = art.text2art("Singt")
    print(banner)

    # Logging: everything down to debug goes to stdout and a per-user file.
    log_file = open(f"client-{username}.log", 'w')
    debug_only = [LogLevelFilterPredicate(LogLevel.debug)]
    targets = [
        FilteringLogObserver(textFileLogObserver(sys.stdout),
                             predicates=debug_only),
        FilteringLogObserver(textFileLogObserver(log_file),
                             predicates=debug_only),
    ]

    # Direct the Twisted Logger to log to both of our observers.
    globalLogBeginner.beginLoggingTo(targets)

    # Namespaced logger for this client subsystem.
    log = Logger("client")

    # TCP
    # ===
    endpoint = TCP4ClientEndpoint(reactor, address, 1234)
    tcp_client = TCPClient(username)
    connection = connectProtocol(endpoint, tcp_client)

    def err(failure):
        print("An error occurred:", failure)

    connection.addErrback(err)

    # UDP
    # ===
    # 0 means any port, we don't care in this case
    udp_client = UDPClient(address, 12345)
    reactor.listenUDP(0, udp_client)

    # Reactor
    # =======
    print("Running reactor")
    reactor.run()
    print("Finished.")
def startLogging(console=True, filepath=None):
    '''
    Starts the global Twisted logger subsystem with maybe
    stdout and/or a file specified in the config file
    '''
    global logLevelFilterPredicate

    targets = []
    if console:
        targets.append(
            FilteringLogObserver(observer=textFileLogObserver(sys.stdout),
                                 predicates=[logLevelFilterPredicate]))
    if filepath is not None and filepath != "":
        # The append-mode handle stays open; the observer owns it.
        targets.append(
            FilteringLogObserver(
                observer=textFileLogObserver(open(filepath, 'a')),
                predicates=[logLevelFilterPredicate]))
    globalLogBeginner.beginLoggingTo(targets)
def start_logging(level=LogLevel.info):
    """Begin global Twisted logging to stdout, filtered at ``level``."""
    level_filter = LogLevelFilterPredicate(defaultLogLevel=level)
    stdout_observer = FilteringLogObserver(
        observer=textFileLogObserver(sys.stdout),
        predicates=[level_filter])
    globalLogBeginner.beginLoggingTo([stdout_observer])
def main(reactor, *descriptions):
    """Connect to every endpoint description, persisting connection targets
    to SQLite and analyzing received text; runs until externally stopped.

    Args:
        reactor: the Twisted reactor.
        *descriptions: Twisted client endpoint description strings.
    """
    log = Logger()
    globalLogBeginner.beginLoggingTo([textFileLogObserver(sys.stdout)])
    endpointObjects = [
        endpoints.clientFromString(reactor, description)
        for description in descriptions
    ]
    # NOTE(review): relies on private endpoint attributes (_host/_port) --
    # fragile across Twisted versions; confirm on upgrade.
    hostPorts = [(endpoint._host, endpoint._port)
                 for endpoint in endpointObjects]
    # Single-thread pool, presumably to serialize access to the SQLite
    # database opened below -- TODO confirm.
    pool = threadpool.ThreadPool(minthreads=1, maxthreads=1, name="persiter")
    persister = Persists(reactor, pool)
    # Flush/stop the persister before the reactor shuts down.
    reactor.addSystemEventTrigger("before", "shutdown", persister.stop)
    persister.start("log.sqlite", hostPorts)
    analyzer = AnalyzesText(persister)
    factory = EncodingCollectionFactory(reactor,
                                        random.SystemRandom(),
                                        analyzer)
    for (host, port), endpoint in zip(hostPorts, endpointObjects):
        try:
            protocol = yield endpoint.connect(factory)
        except Exception:
            log.failure("Could not connect to {host}:{port}",
                        host=host, port=port)
            raise
        # Tag each protocol with its remote address for later reporting.
        protocol.addr = (host, port)
    # Never fires: keeps the task (and reactor) alive indefinitely.
    defer.returnValue(defer.Deferred())
def __init__(self, accessList):
    """Set up the Discord bot: logger, access list, markov generator and
    command dispatch.

    Args:
        accessList: iterable of nicks allowed to control the bot.
    """
    self.logger = Logger(observer=textFileLogObserver(sys.stdout))
    # Access checks are case-insensitive, so normalize once up front.
    self.accessList = [nick.lower() for nick in accessList]

    if not os.path.exists(self.magicFile):
        self.logger.info("Creating magic file")

        try:
            # "touch" the file so the markov generator has something to read.
            with open(self.magicFile, "a"):
                pass
        except Exception as ex:
            # Fixed: Exception has no `.message` attribute on Python 3
            # (raising AttributeError inside the handler); use str(ex).
            self.logger.error("Unable to create magic file! {0}".format(
                str(ex)))
            reactor.stop()

    self.markovGenerator = pymarkov.MarkovChainGenerator(self.magicFile)
    self.channels = []
    self.channelPhrasers = {}
    self.logger.debug("Discord initialized")
    # Maybe add hook/plugin system here?
    self.commands = Commands.Commands(self)
def setup(self, level='warn', namespace_levels=None, text_file=sys.stderr,
          time_format='%H:%M:%S.%f', handle_stdlib=True,
          stdlib_level='notset', stdlib_prefix='stdlib.'):
    """
    Initiates the twisted.logger system:
    - level: default log level as a string (ie: 'warn', 'info', ....).
    - namespace_levels: a dict of namespaces/log level names.
    - text_file: where to write the log to.
    - time_format: as supported by datetime.strftime.
    - handle_stdlib: True/False.
    - stdlib_level: level name, above which stdlib logging is handled.
    - stdlib_prefix: added to stdlib logger name, used as namespace.
    """
    observer = textFileLogObserver(text_file, timeFormat=time_format)
    self._predicate = LogLevelFilterPredicate(
        defaultLogLevel=LogLevel.levelWithName(level),
    )
    # Apply any per-namespace level overrides.
    for namespace, level_name in (namespace_levels or {}).items():
        self._predicate.setLogLevelForNamespace(
            namespace, LogLevel.levelWithName(level_name))
    globalLogBeginner.beginLoggingTo(
        [self._filtered_observer(observer)])
    if handle_stdlib:
        self._handle_stdlib(stdlib_level, stdlib_prefix)
def main():
    """Entry point: parse arguments, load the YAML config, start logging and
    serve Prometheus metrics over HTTP until the reactor stops."""
    if not HAS_LDAP3:
        raise RuntimeError(
            "Missing required 'ldap' module (pip install ldap3).")

    parser = argparse.ArgumentParser(
        prog='openldap_exporter',
        description='Prometheus OpenLDAP exporter')
    parser.add_argument('--config',
                        type=argparse.FileType('r'),
                        help='configuration file',
                        required=True)
    arguments = parser.parse_args()

    # Fixed: yaml.load() without an explicit Loader is deprecated (PyYAML
    # >= 5.1) and can execute arbitrary objects; the config file needs only
    # plain data, so safe_load is correct.
    configs = yaml.safe_load(arguments.config)
    arguments.config.close()

    output = textFileLogObserver(sys.stderr, timeFormat='')
    globalLogBeginner.beginLoggingTo([output])

    # Setup web client
    metrics = MetricsPage(configs['clients'])
    root = RootPage()
    root.putChild(b'metrics', metrics)
    site = QuietSite(root)
    endpoint = serverFromString(reactor,
                                "tcp:port=" + str(configs['server_port']))
    endpoint.listen(site)
    reactor.run()
def test_old_style(self):
    """
    L{Logger} handles old style log strings.
    """
    # Observe each event twice: as a raw event dict (observed) and as
    # rendered text (sio), so both layers can be asserted on.
    observer = LogPublisher()
    observed = []
    observer.addObserver(observed.append)
    sio = StringIO()
    observer.addObserver(textFileLogObserver(sio))
    logger = Logger(observer=observer)
    index = 0

    # A plain message is wrapped under the {msg} format key.
    logger.info("test")
    self.assertEqual(observed[index]["log_level"], LogLevel.info)
    self.assertEqual(observed[index]["log_format"], u"{msg}")
    self.assertEqual(observed[index]["msg"], u"test")
    self.assertEqual(
        sio.getvalue().splitlines()[index].split("#info] ")[1], "test")
    index += 1

    # Empty braces in an old-style message are kept as literal text, not
    # treated as format fields.
    logger.info("test {}")
    self.assertEqual(observed[index]["log_level"], LogLevel.info)
    self.assertEqual(observed[index]["log_format"], u"{msg}")
    self.assertEqual(observed[index]["msg"], u"test {}")
    self.assertEqual(
        sio.getvalue().splitlines()[index].split("#info] ")[1], "test {}")
    index += 1

    # Even a named placeholder stays literal when no format args are given.
    logger.info("test {foo}")
    self.assertEqual(observed[index]["log_level"], LogLevel.info)
    self.assertEqual(observed[index]["log_format"], u"{msg}")
    self.assertEqual(observed[index]["msg"], u"test {foo}")
    self.assertEqual(
        sio.getvalue().splitlines()[index].split("#info] ")[1], "test {foo}")
def init(outFile):
    """Attach a level-filtered text observer writing to ``outFile`` to the
    global log publisher, then announce the configured level."""
    minimum = levels[config.LOG_LEVEL]
    level_filter = LogLevelFilterPredicate(defaultLogLevel=minimum)
    globalLogPublisher.addObserver(
        FilteringLogObserver(textFileLogObserver(outFile=outFile),
                             [level_filter]))
    log.info("Start logging with {l}", l=minimum)
def setUp(self):
    """
    Override some things in mailmail, so that we capture C{stdout},
    and do not call L{reactor.stop}.
    """
    self.out = StringIO()

    # Override the mailmail logger, so we capture stderr output
    from twisted.logger import textFileLogObserver, Logger
    logObserver = textFileLogObserver(self.out)
    self.patch(mailmail, "_log", Logger(observer=logObserver))

    # Captured by the sendmail stub below so tests can assert on them.
    self.host = None
    self.options = None
    self.ident = None

    # Override mailmail.sendmail, so we don't call reactor.stop()
    def sendmail(host, options, ident):
        # Record the call arguments, then deliver through the in-memory
        # reactor instead of the real network.
        self.host = host
        self.options = options
        self.ident = ident
        return smtp.sendmail(
            host,
            options.sender,
            options.to,
            options.body,
            reactor=self.memoryReactor,
        )

    self.patch(mailmail, "sendmail", sendmail)
def cli():
    """Parse command-line options, start logging, and run the OpenHAB
    metrics web server until the reactor stops."""
    parser = argparse.ArgumentParser(prog=__version__.package)
    parser.add_argument('--version', action='version',
                        version=__version__.public())
    parser.add_argument(
        '--openhab', default=default_openhab, type=URL.from_text,
        help='OpenHAB URL, default is {}'.format(default_openhab.to_text()))
    parser.add_argument(
        '--endpoint', default=default_endpoint,
        help=
        'Twisted endpoint descriptor for internal web server to listen on, default is {}'
        .format(default_endpoint))
    options = parser.parse_args()

    log = Logger()
    globalLogBeginner.beginLoggingTo(
        [textFileLogObserver(sys.stderr, timeFormat='')])
    log.debug('Listening on {endpoint:}', endpoint=options.endpoint)
    log.debug('Connecting to {openhab:}', openhab=options.openhab.to_text())

    # Wire up the resource tree: /metrics and /metric-things.
    root = RootPage()
    root.putChild(b'metrics', MetricsPage(reactor, options.openhab, creds))
    root.putChild(b'metric-things',
                  MetricsThingPage(reactor, options.openhab, creds))

    server = serverFromString(reactor, options.endpoint)
    server.listen(Site(root))
    reactor.run()
def init(outFile):
    """Attach a level-filtered, UTF-8 text observer writing to ``outFile``
    to the global log publisher, then announce the configured level."""
    minimum = levels[config.LOG_LEVEL]
    level_filter = LogLevelFilterPredicate(defaultLogLevel=minimum)
    filtered = FilteringLogObserver(textFileLogObserver(outFile=outFile),
                                    [level_filter])
    # NOTE(review): _encoding is a private twisted.logger attribute --
    # confirm the installed Twisted version still honours it.
    filtered._encoding = "utf-8"
    globalLogPublisher.addObserver(filtered)
    log.info("Start logging with {l}", l=minimum)
def __init__(self, accessList):
    """Set up the Discord bot: logger, access list, markov generator and
    command dispatch.

    Args:
        accessList: iterable of nicks allowed to control the bot.
    """
    self.logger = Logger(observer=textFileLogObserver(sys.stdout))
    # Access checks are case-insensitive, so normalize once up front.
    self.accessList = [nick.lower() for nick in accessList]

    if not os.path.exists(self.magicFile):
        self.logger.info("Creating magic file")

        try:
            # "touch" the file so the markov generator has something to read.
            with open(self.magicFile, "a"):
                pass
        except Exception as ex:
            # Fixed: Exception has no `.message` attribute on Python 3
            # (raising AttributeError inside the handler); use str(ex).
            self.logger.error(
                "Unable to create magic file! {0}".format(str(ex)))
            reactor.stop()

    self.markovGenerator = pymarkov.MarkovChainGenerator(self.magicFile)
    self.channels = []
    self.channelPhrasers = {}
    self.logger.debug("Discord initialized")
    # Maybe add hook/plugin system here?
    self.commands = Commands.Commands(self)
def test_utf8(self):
    """
    L{Logger} handles utf8 log strings and format args.
    """
    # Observe each event twice: as a raw event dict (observed) and as
    # rendered text (sio), so both layers can be asserted on.
    observer = LogPublisher()
    observed = []
    observer.addObserver(observed.append)
    sio = StringIO()
    observer.addObserver(textFileLogObserver(sio))
    logger = Logger(observer=observer)
    index = 0

    # UTF-8 byte sequence in the message decodes to u"t\xe9st" in the
    # event, while the rendered line keeps the original bytes.
    logger.info("t\xc3\xa9st")
    self.assertEqual(observed[index]["log_level"], LogLevel.info)
    self.assertEqual(observed[index]["log_format"], u"{msg}")
    self.assertEqual(observed[index]["msg"], u"t\xe9st")
    self.assertEqual(
        sio.getvalue().splitlines()[index].split("#info] ")[1],
        "t\xc3\xa9st")
    index += 1

    # UTF-8 bytes in a keyword format argument are decoded the same way.
    logger.info("{str}", str="t\xc3\xa9st")
    self.assertEqual(observed[index]["log_level"], LogLevel.info)
    self.assertEqual(observed[index]["log_format"], u"{str}")
    self.assertEqual(observed[index]["str"], u"t\xe9st")
    self.assertEqual(
        sio.getvalue().splitlines()[index].split("#info] ")[1],
        "t\xc3\xa9st")
    index += 1

    # UTF-8 bytes in both the format string and the argument.
    logger.info("T\xc3\xa9st {str}", str="t\xc3\xa9st")
    self.assertEqual(observed[index]["log_level"], LogLevel.info)
    self.assertEqual(observed[index]["log_format"], u"T\xe9st {str}")
    self.assertEqual(observed[index]["str"], u"t\xe9st")
    self.assertEqual(
        sio.getvalue().splitlines()[index].split("#info] ")[1],
        "T\xc3\xa9st t\xc3\xa9st")
def logger():
    """Return a text log observer writing to a daily-rotated cowrie.log in
    the configured (or default ``log``) directory."""
    try:
        log_dir = CONFIG.get("honeypot", "log_path")
    except NoOptionError:
        # Option absent from the config: fall back to ./log.
        log_dir = "log"
    daily_file = CowrieDailyLogFile("cowrie.log", log_dir)
    return textFileLogObserver(daily_file,
                               timeFormat='%Y-%m-%dT%H:%M:%S.%f%z')
def main(accessibility=False):
    """Initialise pygame (audio, fonts), start stdout logging, and open the
    login view."""
    pygame.mixer.pre_init(frequency=44100, buffer=1024)
    pygame.init()
    pygame.font.init()
    globalLogBeginner.beginLoggingTo([textFileLogObserver(sys.stdout)])
    view = Display(accessibility=accessibility)
    view.setView('LoginView')
    view.init()
def __init__(self, commandname, maxbackups=3):
    """Create a per-command process logger under the user log directory,
    rotating out up to ``maxbackups`` old log files first.

    Args:
        commandname: base name for the log file (commandname.log).
        maxbackups: how many rotated logs to keep.
    """
    log_dir = os.path.join(fs.adirs.user_log_dir, "processes")
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)
    log_path = os.path.join(log_dir, commandname + ".log")
    _backup_logs(log_path, maxbackups)
    self.log = Logger(
        observer=textFileLogObserver(io.open(log_path, "w")),
        namespace="")
    super(LoggingProcessProtocol, self).__init__()
def getLogger(*args, **kwargs):
    """Return a logger chosen by environment variables: stdlib logging,
    a stdout-observer logger, or the default syslog logger."""
    if os.environ.get('SOLEDAD_USE_PYTHON_LOGGING'):
        import logging
        return logging.getLogger(__name__)

    if os.environ.get('SOLEDAD_LOG_TO_STDOUT'):
        # Fixed: the old code did kwargs({'observer': ...}), which *calls*
        # the kwargs dict and raises TypeError; the intent is to pass a
        # stdout observer to the logger constructed below.
        kwargs['observer'] = textFileLogObserver(sys.stdout)

    return SyslogLogger(*args, **kwargs)
def _getLogObserver(self):
    """
    Create a log observer to be added to the logging system before running
    this application.
    """
    # "-" or an empty name means log to standard output.
    if not self._logfilename or self._logfilename == '-':
        destination = sys.stdout
    else:
        destination = logfile.LogFile.fromFullPath(self._logfilename)
    return logger.textFileLogObserver(destination)
def start_logging(session_files):
    """Begin global logging to stdout and to server.log in the session
    directory; both sinks receive everything down to debug level."""
    # The observer keeps the file handle open for the run.
    server_log = open(session_files.session_dir / "server.log", 'w')
    debug_only = [LogLevelFilterPredicate(LogLevel.debug)]
    targets = [
        FilteringLogObserver(textFileLogObserver(sys.stdout),
                             predicates=debug_only),
        FilteringLogObserver(textFileLogObserver(server_log),
                             predicates=debug_only),
    ]
    # Direct the Twisted Logger to log to both of our observers.
    globalLogBeginner.beginLoggingTo(targets)
def logger():
    """Return a text log observer writing to a daily-rotated cowrie.log."""
    log_dir = CONFIG.get("honeypot", "log_path", fallback="log")
    daily_file = CowrieDailyLogFile("cowrie.log", log_dir)
    # use Z for UTC (Zulu) time, it's shorter.
    if environ.get('TZ') == 'UTC':
        time_format = '%Y-%m-%dT%H:%M:%S.%fZ'
    else:
        time_format = '%Y-%m-%dT%H:%M:%S.%f%z'
    return textFileLogObserver(daily_file, timeFormat=time_format)
def logger():
    """Return a text log observer writing to a daily-rotated cowrie.log."""
    log_dir = CowrieConfig.get("honeypot", "log_path", fallback="log")
    daily_file = CowrieDailyLogFile("cowrie.log", log_dir)
    # use Z for UTC (Zulu) time, it's shorter.
    time_format = (
        "%Y-%m-%dT%H:%M:%S.%fZ"
        if environ.get("TZ") == "UTC"
        else "%Y-%m-%dT%H:%M:%S.%f%z"
    )
    return textFileLogObserver(daily_file, timeFormat=time_format)
def startLogging(cls, logOutput, levelStr='debug'):
    """Attach a level-filtered text observer to the global publisher.

    Args:
        logOutput: an open file object, or a path string (parent directories
            are created and the file is opened for append).
        levelStr: minimum log level name, default 'debug'.
    """
    if isinstance(logOutput, str):
        parent = os.path.dirname(logOutput)
        if parent and not os.path.exists(parent):
            os.makedirs(parent)
        # Handle stays open; the observer owns it for the process lifetime.
        logOutput = open(logOutput, 'a+')
    threshold = LogLevel.levelWithName(levelStr)
    predicate = LogLevelFilterPredicate(defaultLogLevel=threshold)
    globalLogPublisher.addObserver(
        FilteringLogObserver(textFileLogObserver(outFile=logOutput),
                             [predicate]))
def __init__(self, reactor, config_filename):
    """Load the JSON config, attach a file log observer, and expose the API
    over Perspective Broker.

    Args:
        reactor: the Twisted reactor.
        config_filename: path of the JSON configuration file.
    """
    self._network = None
    self._proc = None
    self._reactor = reactor
    self._config_filename = config_filename
    self.connections = dict()
    with open(config_filename) as f:
        self.config = json.load(f)
    # NOTE(review): self.core_config is not assigned in this method --
    # presumably a property defined elsewhere on the class that derives
    # from self.config; confirm. The append-mode handle is deliberately
    # left open: the observer owns it for the process lifetime.
    f = open(self.core_config["log_file"], "a")
    globalLogPublisher.addObserver(textFileLogObserver(f))
    self.api = ApiProxy(self._reactor)
    self.server_factory = pb.PBServerFactory(self.api)
def main():
    """Parse arguments, start stdout logging, and serve the
    cards-against-humanity server on the configured TCP port."""
    parser = ArgumentParser()
    parser.execute()
    log = Logger()
    globalLogBeginner.beginLoggingTo([textFileLogObserver(sys.stdout)])
    log.info(
        "Starting cards-against-humanity server version {major}.{minor}.{revision}",
        major=version.MAJOR,
        minor=version.MINOR,
        revision=version.REVISION)
    listener = TCP4ServerEndpoint(reactor, parser.port)
    listener.listen(ServerFactory(parser.black_cards, parser.database))
    reactor.run()
def init_logging(log_level):
    """
    Initialise the logging by adding an observer to the global log publisher.

    :param str log_level: The minimum log level to log messages for.
    """
    predicate = LogLevelFilterPredicate(LogLevel.levelWithName(log_level))
    # The HTTP/1.1 client factory namespace is capped at warn.
    predicate.setLogLevelForNamespace(
        'twisted.web.client._HTTP11ClientFactory', LogLevel.warn)
    globalLogPublisher.addObserver(
        FilteringLogObserver(textFileLogObserver(sys.stdout), [predicate]))
def startLogging(file: TextIO = sys.stdout) -> None:
    """
    Start Twisted logging system.
    """
    globalLogBeginner.beginLoggingTo(
        [
            FilteringLogObserver(
                textFileLogObserver(file),
                (globalLogLevelPredicate,),
            )
        ],
        redirectStandardIO=False,
    )
def print_log(path=None, output_stream=None):
    """Replay a JSON event log as human-readable text.

    Args:
        path: JSON log file; defaults to pps.log.json in the pychron log dir.
        output_stream: stream or path to write to; defaults to stdout.
    """
    if path is None:
        from pychron.paths import paths
        path = os.path.join(paths.log_dir, 'pps.log.json')

    if output_stream is None:
        output_stream = sys.stdout
    elif isinstance(output_stream, (str, six.text_type)):
        # A path was given: open it for writing.
        output_stream = io.open(output_stream, 'w')

    emit = textFileLogObserver(output_stream)
    for event in eventsFromJSONLogFile(io.open(path)):
        emit(event)
def init_logging(log_level):
    """
    Initialise the logging by adding an observer to the global log publisher.

    :param str log_level: The minimum log level to log messages for.
    """
    level_predicate = LogLevelFilterPredicate(
        LogLevel.levelWithName(log_level))
    # Quieten the chatty HTTP/1.1 client factory namespace.
    level_predicate.setLogLevelForNamespace(
        'twisted.web.client._HTTP11ClientFactory', LogLevel.warn)
    observer = FilteringLogObserver(
        textFileLogObserver(sys.stdout), [level_predicate])
    globalLogPublisher.addObserver(observer)
def print_log(path=None, output_stream=None):
    """Replay a JSON event log as human-readable text.

    Args:
        path: JSON log file; defaults to pps.log.json in the pychron log dir.
        output_stream: stream or path to write to; defaults to stdout.
    """
    if path is None:
        from pychron.paths import paths
        path = os.path.join(paths.log_dir, 'pps.log.json')
    if output_stream is None:
        output_stream = sys.stdout
    elif isinstance(output_stream, str):
        # Fixed: the old check referenced the Python 2-only builtin
        # `unicode`, which is a NameError on Python 3; `str` covers all
        # text path arguments there.
        output_stream = io.open(output_stream, 'w')
    output = textFileLogObserver(output_stream)
    for event in eventsFromJSONLogFile(io.open(path)):
        output(event)
def main(reactor, server_endpoint_string, handoff_endpoint_string):
    """Listen on ``server_endpoint_string``, stop servicing the socket, and
    serve it for handoff over ``handoff_endpoint_string``.

    Runs forever: the final Deferred never fires.
    """
    logger.globalLogBeginner.beginLoggingTo(
        [logger.textFileLogObserver(sys.stderr)])
    server_endpoint = endpoints.serverFromString(
        reactor, server_endpoint_string)
    # Any connection actually accepted on this port is aborted immediately;
    # the port exists only so its socket can be handed off.
    server_port = yield server_endpoint.listen(AlwaysAbortFactory())
    handoff_factory = HandoffFactory(server_port,
                                     describe_socket(server_port.socket))
    # Detach the listening socket from the reactor's read set so this
    # process no longer accepts on it.
    reactor.removeReader(server_port)
    handoff_endpoint = endpoints.serverFromString(
        reactor, handoff_endpoint_string)
    yield handoff_endpoint.listen(handoff_factory)
    # Never fires: keeps the generator (and reactor) alive indefinitely.
    yield defer.Deferred()
def print_log(fp, user=None, n=1, dest=sys.stdout):
    """Print events from the JSON log ``fp`` that occurred at or after the
    n-th most recent 'Start client' event, optionally filtered to one player.

    Args:
        fp: path of the JSON event log.
        user: if given, only events whose player_name matches.
        n: how many client startups back to include.
        dest: stream the rendered events are written to.
    """
    emit = textFileLogObserver(dest)

    events = []
    startups = []
    for event in eventsFromJSONLogFile(io.open(fp)):
        events.append(event)
        if event.get('log_format') == 'Start client':
            startups.append(event.get('log_time'))

    # Cut-off timestamp: the n-th last startup; fall back to the earliest
    # startup when fewer than n exist, or to 0 (show everything) when none.
    if len(startups) >= n:
        cutoff = startups[-n]
    elif startups:
        cutoff = startups[0]
    else:
        cutoff = 0

    for event in events:
        recent_enough = cutoff <= event.get('log_time')
        name_matches = (not user) or user == event.get('player_name')
        if recent_enough and name_matches:
            emit(event)
def _getLogObserver(self):
    """
    Create and return a suitable log observer for the given configuration.

    The observer will go to syslog using the prefix C{_syslogPrefix} if
    C{_syslog} is true. Otherwise, it will go to the file named
    C{_logfilename} or, if C{_nodaemon} is true and C{_logfilename} is
    C{"-"}, to stdout.

    @return: An object suitable to be passed to C{log.addObserver}.
    """
    if self._syslog:
        # FIXME: Requires twisted.python.syslog to be ported to Py3
        # https://twistedmatrix.com/trac/ticket/7957
        from twisted.python import syslog
        return syslog.SyslogObserver(self._syslogPrefix).emit

    if self._logfilename == '-':
        if not self._nodaemon:
            # A daemonized process has no stdout to write to.
            sys.exit('Daemons cannot log to stdout, exiting!')
        logFile = sys.stdout
    elif self._nodaemon and not self._logfilename:
        logFile = sys.stdout
    else:
        if not self._logfilename:
            # Daemonized with no explicit file: use the conventional name.
            self._logfilename = 'twistd.log'
        logFile = logfile.LogFile.fromFullPath(self._logfilename)
        # Install a SIGUSR1 handler that rotates the log file, unless the
        # platform lacks the signal module or a handler is already set.
        try:
            import signal
        except ImportError:
            pass
        else:
            # Override if signal is set to None or SIG_DFL (0)
            if not signal.getsignal(signal.SIGUSR1):
                def rotateLog(signal, frame):
                    from twisted.internet import reactor
                    reactor.callFromThread(logFile.rotate)
                signal.signal(signal.SIGUSR1, rotateLog)

    return logger.textFileLogObserver(logFile)
def handle(self, *args, **options):
    """Run the feed updater under react(), logging to stderr."""
    stderr_observer = textFileLogObserver(sys.stderr)
    globalLogBeginner.beginLoggingTo([stderr_observer],
                                     redirectStandardIO=False)
    react(updateFeeds, (options['max_fetch'],))
def main(reactor):
    """Start stderr logging, connect to ZooKeeper, and continue in
    zkconnected once the connection is established."""
    globalLogBeginner.beginLoggingTo([textFileLogObserver(sys.stderr)])
    d = zk.connect()
    d.addCallback(zkconnected, reactor)
    return d
else: log.info("Unrecognized command {command}", command=command) def log_and_exit(ret, reactor): log.failure("Critical failure, terminating application") reactor.stop() def register(root, reactor): log.info("Registering app for bot") return root.callRemote("register", API(reactor)) if __name__ == "__main__": from twisted.internet import reactor with open(environ["CONFIG"]) as f: config.update(json.load(f)) f = open(config["core"]["log_file"], "a") globalLogPublisher.addObserver(textFileLogObserver(f)) endpoint = endpoints.StandardIOEndpoint(reactor) factory = pb.PBClientFactory() d = endpoint.listen(factory) @d.addCallback def initialize(_): d = factory.getRootObject() d.addCallback(register, reactor) d.addErrback(log_and_exit, reactor) return reactor.run()
def startLogging(fileobj, level=LogLevel.debug):
    """Begin global Twisted logging to ``fileobj``, filtered at ``level``."""
    predicate = LogLevelFilterPredicate(defaultLogLevel=level)
    filtered = FilteringLogObserver(
        observer=textFileLogObserver(fileobj),
        predicates=[predicate],
    )
    globalLogBeginner.beginLoggingTo([filtered])
def audit():
    """Return a Logger for the audit trail: stdout when LOG_DIRECTORY is
    "stdout", otherwise a daily-rotated audit.log file."""
    if config.LOG_DIRECTORY == "stdout":
        out = sys.stdout
    else:
        out = daily("audit.log")
    observer = textFileLogObserver(outFile=out)
    # NOTE(review): _encoding is a private twisted.logger attribute --
    # confirm the installed Twisted version still honours it.
    observer._encoding = "utf-8"
    return Logger(observer=observer)
def main_loop():
    """Begin global logging (append) to log/esb.log, then run the reactor."""
    # The observer owns the handle for the lifetime of the process.
    esb_log = open("log/esb.log", "a")
    globalLogBeginner.beginLoggingTo([textFileLogObserver(esb_log)])
    reactor.run()
def StderrLogger():
    """Return a text log observer that writes to standard error."""
    return textFileLogObserver(sys.stderr)
# print 'Response phrase:', response.phrase # print 'Response headers:' # print pformat(list(response.headers.getAllRawHeaders())) finished = Deferred() response.deliverBody(RestHandle(finished, self.event_handler)) return finished d.addCallbacks(cbRequest, cbFail) return d if __name__ == '__main__': import sys from twisted.logger import globalLogBeginner, textFileLogObserver observers = [textFileLogObserver(sys.stdout)] globalLogBeginner.beginLoggingTo(observers) cl = True # log.startLogging(sys.stdout) log = Logger() def set_temp(obj): temp = obj.devices( path='thermostats/o4WARbb6TBa0Z81uC9faoLuE3_EunExt', target_temperature_c=23) temp.addCallback(result, 'temp set_request') def result(data, prefix=''): log.info('{prefix} request result: {data}', prefix=prefix, data=data) # reactor.stop() # @UndefinedVariable
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Secant is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Secant. If not, see <http://www.gnu.org/licenses/>.

import sys

from twisted.internet import reactor
from twisted.internet import endpoints
from twisted.logger import globalLogBeginner
from twisted.logger import textFileLogObserver
from twisted.logger import Logger

from secant import config
from secant import TacacsProtocolFactory

# Log everything to stdout, then serve TACACS+ on the standard port (49).
globalLogBeginner.beginLoggingTo([textFileLogObserver(sys.stdout)])

tacacs_factory = TacacsProtocolFactory()
listener = endpoints.serverFromString(reactor, 'tcp:port=49')
listener.listen(tacacs_factory)
reactor.run()
from __future__ import absolute_import, division, print_function import sys from gaspocket.bot import run from twisted.internet.task import react from twisted.logger import globalLogPublisher, textFileLogObserver globalLogPublisher.addObserver(textFileLogObserver(sys.stdout)) if __name__ == '__main__': react(run, [])
elif payload == b'DECREASE': self.radio.volumeDecrease() else: try: volume = int(payload) self.radio.setVolume(volume) except ValueError: pass self.periodicVolumeStatus() # use the serial number embedded into the Raspberry Pi as a unique identifier cpuinfo_re = re.compile(br'\nSerial\s+:\s+([0-9a-f]+)\s*\n') with open('/proc/cpuinfo', 'rb') as cpuinfo: data = cpuinfo.read() match = cpuinfo_re.search(data) if not match: sys.stderr.write('Cannot read serial number') sys.exit(1) serial = match.group(1).decode('ascii') with open('/opt/rpiwr/etc/config.json','rb') as c: config = json.loads(c.read().decode('utf-8')) try: output = textFileLogObserver(sys.stderr, timeFormat="") globalLogBeginner.beginLoggingTo([output]) r = Radio(serial, config) reactor.run() finally: GPIO.cleanup()