class PrintLogThread(threading.Thread):
    """Serialize all log-file writes onto a single thread.

    Records are consumed from ``queue`` (fed by Output.handlePrint) and
    appended, JSON-encoded, to a twisted DailyLogFile.  The file is
    closed elsewhere (Output.endLogging), so this class only has to
    concern itself with the queue.  The target ``path`` must exist
    before the thread first runs.
    """

    def __init__(self, path, queue, name):
        threading.Thread.__init__(self)
        self.queue = queue
        self.writer = DailyLogFile(name, path)
        # Don't let this thread keep the process alive if the rest of
        # the system goes down.  (``daemon`` attribute replaces the
        # deprecated setDaemon(True).)
        self.daemon = True

    def run(self):
        while True:
            result = self.queue.get(block=True)
            try:
                writable = json.dumps(result)
                self.writer.write(writable + '\n')
                self.writer.flush()
            except Exception:
                # Best-effort: a malformed record must never kill the
                # writer thread.  (Was a bare ``except:``, which also
                # swallowed KeyboardInterrupt/SystemExit.)
                pass
            self.queue.task_done()
def rotate(self):
    """Rotate the current logfile. Also remove extra entries and
    compress the last ones.
    """
    # Daily rotation first.
    DailyLogFile.rotate(self)

    # Drop every rotated file beyond the retention limit.
    for stale in self.listLogs()[self.maxRotatedFiles:]:
        os.remove(stale)

    # Re-list: the prune above changed what is on disk.
    remaining = self.listLogs()

    # Nothing to compress if there are at most ``compressLast`` files.
    if len(remaining) <= self.compressLast:
        return

    # Compress the newest ``compressLast`` files, skipping any that are
    # already compressed.
    for candidate in remaining[-self.compressLast:]:
        if not candidate.endswith('bz2'):
            self._compressFile(candidate)
def rotate(self):
    """Rotate the log daily, then delete all previously rotated files.

    Only the live ``honeypy.log`` is kept; every ``honeypy.log.*`` file
    left behind by earlier rotations is removed.  (Locals renamed so the
    builtins ``dir`` and ``file`` are no longer shadowed.)
    """
    DailyLogFile.rotate(self)
    log_dir = os.path.dirname(self.path)
    for entry in os.listdir(log_dir):
        # Rotated files are named "honeypy.log.<date suffix>".
        if entry.startswith("honeypy.log."):
            os.remove(os.path.join(log_dir, entry))
def write(self, data):
    """Write ``data`` to the log, first re-opening the file when
    rotation is disabled and the file on disk is missing or has been
    replaced out from under us (e.g. by an external logrotate).
    """
    if not self.enableRotation:
        if not os.path.exists(self.path):
            self.reopen()
        else:
            disk = os.stat(self.path)
            fd = os.fstat(self._file.fileno())
            # Same inode on the same device means our handle still
            # points at the file on disk.
            same_file = (disk.st_ino == fd.st_ino
                         and disk.st_dev == fd.st_dev)
            if not same_file:
                self.reopen()
    DailyLogFile.write(self, data)
def __init__(self, name, directory, defaultMode=None,
             maxRotatedFiles=None, compressLast=None):
    """Create the log file, optionally overriding the class-level
    retention (``maxRotatedFiles``) and compression (``compressLast``)
    counts.
    """
    DailyLogFile.__init__(self, name, directory, defaultMode)
    if maxRotatedFiles is not None:
        self.maxRotatedFiles = int(maxRotatedFiles)
    if compressLast is not None:
        self.compressLast = int(compressLast)
    # Sanity check: we can only compress files we actually keep.
    # NOTE: assert is stripped under ``python -O``.
    assert self.compressLast <= self.maxRotatedFiles, (
        "Only %d rotate files are kept, cannot compress %d"
        % (self.maxRotatedFiles, self.compressLast))
def noiseControl(options):
    """Terminal noise/info logic: unless the user asked for "loud"
    terminal output, route logging to the configured daily log file."""
    if options["loud"]:
        return None
    log.startLogging(DailyLogFile.fromFullPath(options["log"]))
    return None
def init_logging(logdir, logname):
    """Start twisted logging: to stdout when debugging is on, and always
    to a daily-rotated file under ``logdir`` (created if missing)."""
    if DEBUG_LEVEL > 0:
        log.startLogging(sys.stdout)
    if not os.path.exists(logdir):
        os.makedirs(logdir)
    target = get_path(os.path.join(logdir, logname))
    log.startLogging(DailyLogFile.fromFullPath(target))
def _createLogFile(self, uuid):
    """Start twisted logging to ``../logs/aiprocesses/<uuid>.log``.

    Fix: paths are built with os.path.join instead of hard-coded "\\"
    separators, so this also works on non-Windows hosts (os.path.join
    produces identical paths on Windows).
    """
    logDirPath = os.path.join(os.getcwd(), '..', 'logs', 'aiprocesses')
    if not os.path.exists(logDirPath):
        os.mkdir(logDirPath)
    logFilePath = os.path.join(logDirPath, '{uuid}.log'.format(uuid=uuid))
    log.startLogging(DailyLogFile.fromFullPath(logFilePath))
def __init__(self, path, queue, name):
    """Set up the writer thread: remember the work queue and open a
    daily-rotated log file named ``name`` inside ``path``."""
    threading.Thread.__init__(self)
    self.queue = queue
    self.writer = DailyLogFile(name, path)
    # Don't want this to float around if the rest of the system goes
    # down.  (``daemon`` attribute replaces the deprecated setDaemon().)
    self.daemon = True
def __init__(self):
    """Open the configured log file and start twisted logging on it.

    DailyLogFile asserts internally when the directory is missing; that
    AssertionError is re-raised with a friendlier message.
    """
    try:
        logfile = DailyLogFile.fromFullPath(cfg.logging.filename)
    except AssertionError:
        raise AssertionError(
            "Assertion error attempting to open the log file: {0}. "
            "Does the directory exist?".format(cfg.logging.filename))
    twistedlogger.startLogging(logfile, setStdout=False)
def makeService(_config):
    """Build the top-level "lumen" multi-service: a Bayeux TCP server
    plus a web console, with twisted logging going to a daily log file
    (when ``logpath`` is configured) or stdout.

    Fix: compare against None with ``is not None`` rather than ``!=``.
    """
    global config
    config = _config

    if config['logpath'] is not None:
        logFile = DailyLogFile.fromFullPath(os.path.abspath(config['logpath']))
    else:
        logFile = sys.stdout
    log.startLogging(logFile)

    lumenService = service.MultiService()

    # Bayeux Service
    import bayeux
    bayeuxFactory = bayeux.BayeuxServerFactory()
    bayeuxService = internet.TCPServer(config['port'], bayeuxFactory)
    bayeuxService.setServiceParent(lumenService)

    # WebConsole Service
    import webconsole
    site = server.Site(webconsole.WebConsole())
    webConsoleService = internet.TCPServer(config['webport'], site)
    webConsoleService.setServiceParent(lumenService)

    application = service.Application("lumen")
    lumenService.setServiceParent(application)
    return lumenService
def run_normal(self):
    """Start logging (stdout in debug mode, daily file otherwise) and
    bind the portal UDP listener."""
    destination = (sys.stdout if self.debug
                   else DailyLogFile.fromFullPath(self.logfile))
    log.startLogging(destination)
    log.msg("portal server listen %s" % self.portal_host)
    reactor.listenUDP(self.listen_port, self, interface=self.portal_host)
def make_logfile_observer(path, show_source=False):
    """
    Make an observer that writes out to C{path}.

    @param path: full path of the daily-rotated log file.
    @param show_source: when True, append the event's namespace to the
        system column.
    """
    from twisted.logger import FileLogObserver
    from twisted.python.logfile import DailyLogFile

    f = DailyLogFile.fromFullPath(path)

    def _render(event):
        # Default system column: "Controller <pid>" when the event does
        # not carry its own log_system.
        if event.get("log_system", u"-") == u"-":
            logSystem = u"{:<10} {:>6}".format("Controller", os.getpid())
        else:
            logSystem = event["log_system"]
        if show_source and event.get("log_namespace") is not None:
            logSystem += " " + event.get("cb_namespace",
                                         event.get("log_namespace", ''))
        if event.get("log_format", None) is not None:
            eventText = formatEvent(event)
        else:
            eventText = ""
        if "log_failure" in event:
            # This is a traceback. Print it.
            eventText = eventText + event["log_failure"].getTraceback()
        # NOCOLOUR_FORMAT is a module-level template taking
        # (time, system, text) positionally.
        eventString = NOCOLOUR_FORMAT.format(
            formatTime(event["log_time"]), logSystem, eventText) + os.linesep
        return eventString

    return FileLogObserver(f, _render)
def start_logging(opts):
    """Begin twisted logging — to a daily-rotated file when
    ``opts.logfile`` is set, to stderr otherwise."""
    from twisted.python import log
    from twisted.python.logfile import DailyLogFile
    if opts.logfile:
        destination = DailyLogFile.fromFullPath(opts.logfile)
    else:
        destination = sys.stderr
    log.startLogging(destination)
def __init__(self, *args, **kwargs):
    """
    Create a log file rotating on length.

    @param name: file name.
    @type name: C{str}
    @param directory: path of the log file.
    @type directory: C{str}
    @param defaultMode: mode used to create the file.
    @type defaultMode: C{int}
    @param maxRotatedFiles: if not None, max number of log files the
        class creates. Warning: it removes all log files above this
        number.
    @type maxRotatedFiles: C{int}
    """
    # Pull our extra keyword out before delegating to the base class,
    # which does not accept it.
    self.maxRotatedFiles = kwargs.pop('maxRotatedFiles', None)
    DailyLogFile.__init__(self, *args, **kwargs)
    self._logger = logWithContext(type='console')
def logToDir(directory='logs', LOG_TYPE=('console',), OBSERVER=MyLogObserver):
    """Call this to write logs to the specified directory, optionally
    override the FileLogObserver.
    """
    for log_name in LOG_TYPE:
        full_path = os.path.join(directory, log_name + '.log')
        logs[log_name] = OBSERVER(DailyLogFile.fromFullPath(full_path))
def start_logging(self):
    """Starts logging to log file or stdout depending on config."""
    if not self.use_log:
        return
    if self.log_stdout:
        log.startLogging(sys.stdout)
        return
    log.startLogging(
        DailyLogFile.fromFullPath(os.path.expanduser(self.log_file)))
def initLog(log_file, log_path, loglevel=0):
    # Open the daily log file, flush any messages buffered before logging
    # was ready (_tracemsg), then install a FileLogObserver whose
    # "system" column is the current stackless tasklet's repr.
    global log_level, _tracemsg
    log_level = loglevel
    fout = DailyLogFile(log_file, log_path)
    if _tracemsg:
        for msg in _tracemsg:
            fout.write(msg)
            fout.write('\n')
        _tracemsg = None

    class _(log.FileLogObserver):
        # NOTE(review): this assignment mutates FileLogObserver.timeFormat
        # process-wide, not just for this subclass.
        log.FileLogObserver.timeFormat = '%m-%d %H:%M:%S.%f'

        def emit(self, eventDict):
            # Tag each event with the current tasklet; slicing trims the
            # repr's fixed "<stackless..." prefix/suffix — assumes
            # stackless Python's repr format.
            taskinfo = "%r" % stackless.getcurrent()
            eventDict['system'] = taskinfo[9:-2]
            log.FileLogObserver.emit(self, eventDict)

    fl = _(fout)
    log.startLoggingWithObserver(fl.emit)
def start_logging(opts):
    """Start twisted logging (daily file or stderr) and report the
    process's open-files rlimit."""
    from twisted.python import log
    from twisted.python.logfile import DailyLogFile
    target = (DailyLogFile.fromFullPath(opts.logfile)
              if opts.logfile else sys.stderr)
    log.startLogging(target)
    soft_limit = resource.getrlimit(resource.RLIMIT_NOFILE)[0]
    log.msg("Open files limit: %d" % soft_limit)
def _handle_logging(self):
    """
    Start logging to file if there is some file configuration
    and we are not running in development mode
    """
    if self.development is False and self._log_file is not None:
        self.already_logging = True
        # NOTE(review): the guard reads ``self._log_file`` but the call
        # uses ``self.log_file`` — presumably a property wrapping the
        # same attribute; confirm it exists on this class.
        log.startLogging(DailyLogFile.fromFullPath(self.log_file))
def init(cls, target='stdout', log_level=2, filename='twistd.log'):
    """Record logging configuration on the class and start twisted
    logging to either a daily-rotated file or stdout.

    Fix: compare strings with ``==`` instead of ``is`` — identity
    comparison against a literal only works by CPython interning
    accident (and emits a SyntaxWarning on 3.8+).
    """
    cls.filename = filename
    cls.target = target
    cls.log_level = log_level
    if cls.target == 'file':
        logfile = get_filename(filename=cls.filename)
        log.startLogging(DailyLogFile.fromFullPath(logfile))
    else:
        log.startLogging(stdout)
def configure(**options):
    # Configure stdlib logging from ``loglevel`` and route twisted
    # logging to a daily-rotated file (``logfile``) or stdout.
    global logLevel, logFile
    LEVELS = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warning': logging.WARNING,
        'error': logging.ERROR,
        'critical': logging.CRITICAL
    }
    # Raises KeyError on an unrecognised level name.
    logLevel = LEVELS[options.get('loglevel', 'info').lower()]
    logging.basicConfig(level=logLevel)
    logFile = options.get('logfile', None)
    if logFile:
        # defaultMode=0644
        # workaround for https://twistedmatrix.com/trac/ticket/7026
        # NOTE: 0644 is a Python 2 octal literal; this module cannot run
        # on Python 3 as written (would need 0o644).
        log.startLogging(DailyLogFile.fromFullPath(logFile, defaultMode=0644))
    else:
        log.startLogging(sys.stdout)
def main(options):
    # Wire up the master job tracker: log to stdout *and* a daily
    # master.log, mirror log events into redis, then serve the tracker.
    connection = settings.REDIS_CLASS()
    log.startLogging(sys.stdout)
    # setStdout=1 also redirects sys.stdout/stderr through twisted's
    # logging system.
    log.startLogging(
        DailyLogFile.fromFullPath(
            os.path.join(settings.LOG_DIRECTORY, 'master.log')),
        setStdout=1)
    log.addObserver(RedisLogObserver(connection).emit)
    factory = TwitterJobTrackerFactory(
        connection, TwitterJob, settings.MAX_CLIENTS, options=options)
    # Listening port is offset by the HA instance index.
    reactor.listenTCP(settings.JT_PORT + options.ha, factory)
    reactor.run()
def setup_logging():
    """Create the log directory if needed and start logging through a
    ScrapyrtFileLogObserver — into a daily-rotated file when LOG_FILE is
    configured, stderr otherwise."""
    if not os.path.exists(settings.LOG_DIR):
        os.makedirs(settings.LOG_DIR)
    if settings.LOG_FILE:
        target = DailyLogFile.fromFullPath(
            os.path.join(settings.LOG_DIR, settings.LOG_FILE))
    else:
        target = sys.stderr
    startLoggingWithObserver(ScrapyrtFileLogObserver(target).emit,
                             setStdout=False)
def start_logging(opts):
    """Start twisted logging (daily file or stderr); on twisted >= 13,
    switch the observer's timestamps to include microseconds."""
    import twisted
    from twisted.python import log
    if opts.logfile:
        from twisted.python.logfile import DailyLogFile
        target = DailyLogFile.fromFullPath(opts.logfile)
    else:
        target = sys.stderr
    observer = log.startLogging(target)
    if twisted.version.major >= 13:
        # add microseconds to log
        observer.timeFormat = "%Y-%m-%d %H:%M:%S.%f%z"
def __init__(self, options=None):
    """Mamba constructor.

    Initialises version strings, language, headers and the
    controller/model managers; parses ``options``, monkey-patches
    twisted, and (when a log file is configured) starts daily-rotated
    file logging.
    """
    super(Mamba, self).__init__()
    self.monkey_patched = False
    self.already_logging = False
    self._mamba_ver = _mamba_version.version.short()
    self._ver = _app_ver.short()
    self._port = 1936
    self._log_file = None
    self._project_ver = _app_project_ver.short()
    self.name = 'Mamba Webservice v%s' % _mamba_version.version.short()
    self.description = (
        'Mamba %s is a Web applications framework that works '
        'over Twisted using Jinja2 as GUI enhancement '
        'Mamba has been developed by Oscar Campos '
        '<*****@*****.**>' % _mamba_version.version.short()
    )
    # Language prefix of $LANG, e.g. "en" from "en_EN.UTF-8".
    self.language = os.environ.get('LANG', 'en_EN').split('_')[0]
    self.lessjs = False
    self._parse_options(options)
    # monkey patch twisted
    self._monkey_patch()
    # register log file if any
    if self.log_file is not None:
        self.already_logging = True
        log.startLogging(DailyLogFile.fromFullPath(self.log_file))
    # PyPy does not implement set_debug method in gc object
    if getattr(options, 'debug', False):
        if hasattr(gc, 'set_debug'):
            gc.set_debug(gc.DEBUG_STATS | gc.DEBUG_INSTANCES)
        else:
            # NOTE(review): "laking" in this message is a typo for
            # "lacking" (left untouched: runtime string).
            log.msg(
                'Debug is set as True but gc object is laking '
                'set_debug method'
            )
    self._header = headers.Headers()
    self._header.language = self.language
    self._header.description = self.description
    self.managers = {
        'controller': controller.ControllerManager(),
        'model': model.ModelManager()
    }
def rotate(self): """ Rotate the file and create a new one. If it's not possible to open new logfile, this will fail silently, and continue logging to old logfile. Old log files will be automatically purged. """ #daily rotation first DailyLogFile.rotate(self) if not self.maxRotatedFiles: return if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)): return logs = self.listLogs() while len(logs) >= self.maxRotatedFiles: l = logs.pop(0) #this should never match, but just make sure if l.endswith('log'): continue self.notification('deleting %s' % l) os.remove(l)
def start(self):
    ''' Start a daemon, bittorrent tracker, bittorrent client

    Installs the flow-control ticker on the reactor and begins twisted
    logging to a daily-rotated btcp.log.

    Fix: the log path is built with os.path.join so a missing trailing
    separator on ``log_dir`` cannot silently mangle the file name.
    '''
    import os
    from flowcontrol import FlowControl
    from twisted.internet import reactor
    from twisted.python import log
    from twisted.python.logfile import DailyLogFile
    log.startLogging(
        DailyLogFile.fromFullPath(os.path.join(self.f.log_dir, "btcp.log")))
    #self.tt = bttracker(self.ts_name)  # !!! Code bttracker() !!!
    #self.ts = bttorrent(self.ts_name)  # !!! Code btTorrent() !!!
    self.fc = FlowControl(f=self.f)  # !!! Code FlowControle() !!!
    # schedule to run next time in self.interval
    reactor.callLater(self.interval, self.fc._tick)
    # schedule to run next time in self.interval
    reactor.callLater(self.interval + 10, self.fc._tack)
    self.blog.debug('BtCP.start: started!')
    ''' !!! Code me !!! '''
def start():
    """Start the WebSocket robot-controller server (skipped entirely in
    dev mode).

    Fix: test the devMode flag idiomatically with ``not`` instead of
    ``== False``.
    """
    from twisted.python import log
    from twisted.python.logfile import DailyLogFile
    from twisted.internet import reactor
    if not settings['devMode']:
        #Setup WebSocket
        log.startLogging(DailyLogFile.fromFullPath(settings['release']['log']))
        factory = WebSocketServerFactory(u"ws://127.0.0.1:9001")
        factory.protocol = ServerRobotController
        reactor.listenTCP(9001, factory)
        reactor.run()
def run_normal(self):
    """Start logging (stdout in debug mode, daily file otherwise) and
    bind the web listener, over SSL when configured."""
    log.startLogging(
        sys.stdout if self.debug else DailyLogFile.fromFullPath(self.logfile))
    log.msg("server listen %s" % self.host)
    if self.use_ssl:
        log.msg("Control SSL Enable!")
        from twisted.internet import ssl
        context = ssl.DefaultOpenSSLContextFactory(
            self.privatekey, self.certificate)
        reactor.listenSSL(self.port, self.web_factory,
                          contextFactory=context, interface=self.host)
    else:
        reactor.listenTCP(self.port, self.web_factory, interface=self.host)
    if not self.standalone:
        reactor.run()
def __init__(self, args=None): '''Args must be an object with the following attributes: foreground, logfile, mailbox, nClients, silent, socketpath, verbose Suitable defaults will be supplied.''' # Pass command line args to ProtocolIVSHMSG, then open logging. if args is None: args = argparse.Namespace() for arg, default in self._required_arg_defaults.items(): setattr(args, arg, getattr(args, arg, default)) # Mailbox may be sized above the requested number of clients to # satisfy QEMU IVSHMEM restrictions. args.server_id = args.nClients + 1 args.nEvents = args.nClients + 2 FAMEZ_MailBox(args=args) # singleton class, no need to keep instance self.cmdlineargs = args if args.foreground: TPlog.startLogging(sys.stdout, setStdout=False) else: PRINT('Logging to %s' % args.logfile) TPlog.startLogging( DailyLogFile.fromFullPath(args.logfile), setStdout=True) # "Pass-through" explicit print() for debug args.logmsg = TPlog.msg args.logerr = TPlog.err # By Twisted version 18, "mode=" is deprecated and you should just # inherit the tacky bit from the parent directory. wantPID creates # <path>.lock as a symlink to "PID". E = UNIXServerEndpoint( TIreactor, args.socketpath, mode=0o666, # Deprecated at Twisted 18 wantPID=True) E.listen(self) args.logmsg('FAME-Z server @%d ready for %d clients on %s' % (args.server_id, args.nClients, args.socketpath))
def run_command_start(options): """ Subcommand "crossbar start". """ ## start Twisted logging ## if not options.logdir: logfd = sys.stderr else: from twisted.python.logfile import DailyLogFile logfd = DailyLogFile.fromFullPath(os.path.join(options.logdir, 'node.log')) from crossbar.twisted.process import DefaultSystemFileLogObserver flo = DefaultSystemFileLogObserver(logfd, system = "{:<10} {:>6}".format("Controller", os.getpid())) log.startLoggingWithObserver(flo.emit) log.msg("=" * 30 + " Crossbar.io " + "=" * 30 + "\n") import crossbar log.msg("Crossbar.io {} starting".format(crossbar.__version__)) ## we use an Autobahn utility to import the "best" available Twisted reactor ## reactor = install_reactor(options.reactor, options.debug) from twisted.python.reflect import qual log.msg("Running on {} using {} reactor".format(platform.python_implementation(), qual(reactor.__class__).split('.')[-1])) log.msg("Starting from node directory {}".format(options.cbdir)) ## create and start Crossbar.io node ## from crossbar.controller.node import Node node = Node(reactor, options) node.start() reactor.run()
def start(self, app):
    """
    Start the application.

    Depending on the configured mode this creates the master node only,
    a single named node, or the master plus every node listed in the
    configuration, then registers startup/shutdown triggers.

    :param app: the twisted Application object to attach services to
    :return: None
    """
    self.start_time = reactor.seconds()
    if self.mode == MULTI_SERVER_MODE:
        self.create_master()
        servers = Config().servers
        for name in servers.keys():
            self.create_node(name)
    elif self.mode == SINGLE_SERVER_MODE:
        self.create_node(self.node)
    else:
        self.create_master()
    reactor.addSystemEventTrigger('after', 'startup', self.startAfter)
    reactor.addSystemEventTrigger('before', 'shutdown', self.stopBefore)
    # With "-y" (and without "-n") on the command line, route twisted
    # logging into logs/master.log — presumably the twistd daemonized
    # case; TODO confirm flag semantics with the launcher script.
    if "-y" in sys.argv and "-n" not in sys.argv:
        app.setComponent(
            log.ILogObserver,
            log.FileLogObserver(DailyLogFile("logs/master.log", "")).emit)
    self.service.setServiceParent(app)
    GlobalObject().server = self
def main(): if len(sys.argv) < 2: print 'Usage: %s config_file' % sys.argv[0] sys.exit() log.startLogging(sys.stdout) Config.init(sys.argv[1]) if Config.debug: log.startLogging(sys.stdout) else: log.startLogging(DailyLogFile.fromFullPath(Config.get('log.file'))) handler = SnowflakeServiceHandler(Config.getint('worker.id'), Config.getint('datacenter.id')) processor = SnowflakeService.Processor(handler) server = TTwisted.ThriftServerFactory( processor=processor, iprot_factory=TBinaryProtocol.TBinaryProtocolFactory()) reactor.listenTCP(Config.getint('port', default=9999), server, interface=Config.get('listen', default="0.0.0.0")) reactor.run()
def make_logfile_observer(path, show_source=False):
    """
    Make an observer that writes out to C{path}.

    Renders through the shared STANDARD_FORMAT template with empty
    colour fields and strips any remaining ANSI sequences before
    writing to the daily-rotated file.
    """
    from twisted.logger import FileLogObserver
    from twisted.python.logfile import DailyLogFile

    f = DailyLogFile.fromFullPath(path)

    def _render(event):
        # Default system column: "Controller <pid>" when the event does
        # not carry its own log_system.
        if event.get("log_system", u"-") == u"-":
            logSystem = u"{:<10} {:>6}".format("Controller", os.getpid())
        else:
            logSystem = event["log_system"]
        if show_source and event.get("log_namespace") is not None:
            logSystem += " " + event.get("cb_namespace",
                                         event.get("log_namespace", ''))
        if event.get("log_format", None) is not None:
            eventText = formatEvent(event)
        else:
            eventText = u""
        if "log_failure" in event:
            # This is a traceback. Print it.
            eventText = eventText + event["log_failure"].getTraceback()
        eventString = strip_ansi(STANDARD_FORMAT.format(
            startcolour=u'',
            time=formatTime(event["log_time"]),
            system=logSystem,
            endcolour=u'',
            text=eventText)) + os.linesep
        return eventString

    return FileLogObserver(f, _render)
def run(**options):
    # Server entry point: apply option-driven module globals, start
    # optional file logging and the admin listener, wire the database
    # connection factory, then serve.
    if 'ENABLE_TERMINAL_EVENT_FEED' in options:
        global ENABLE_TERMINAL_EVENT_FEED
        ENABLE_TERMINAL_EVENT_FEED = options['ENABLE_TERMINAL_EVENT_FEED']
    if 'LOGFILE_FULLPATH' in options:
        from twisted.python.logfile import DailyLogFile
        global ENABLE_LOGFILE
        ENABLE_LOGFILE = True
        log.startLogging(DailyLogFile.fromFullPath(
            options['LOGFILE_FULLPATH']), setStdout=False)
    # Admin interface is only exposed when a key was supplied.
    if 'ADMIN_KEY' in options:
        admin.ADMIN_KEY = options['ADMIN_KEY']
        reactor.listenTCP(3511, admin.Factory())
    # NOTE: assert is stripped under ``python -O``.
    assert 'DB_CONNECTION' in options
    import mysql.connector
    db.getConnection = lambda: mysql.connector.connect(**options[
        'DB_CONNECTION'])
    db.debug = debug
    admin.UserDB = db.UserDB
    # reactor.listenUDP(3513, P2pDataSwitch())
    reactor.listenTCP(3512, ServerFactory())
    # Periodic connection garbage collection every 15 minutes.
    reactor.callLater(900, tcpGC)
    reactor.run()
def initLog(log_file, log_path, loglevel=0):
    # Open the daily log file, flush any messages buffered before logging
    # was ready (_tracemsg), then install a FileLogObserver whose
    # "system" column is the current stackless tasklet's repr.
    global log_level, _tracemsg
    log_level = loglevel
    fout = DailyLogFile(log_file, log_path)
    if _tracemsg:
        for msg in _tracemsg:
            fout.write(msg)
            fout.write('\n')
        _tracemsg = None

    class _(log.FileLogObserver):
        # NOTE(review): this assignment mutates FileLogObserver.timeFormat
        # process-wide, not just for this subclass.
        log.FileLogObserver.timeFormat = '%m-%d %H:%M:%S.%f'

        def emit(self, eventDict):
            # Tag each event with the current tasklet; slicing trims the
            # repr's fixed "<stackless..." prefix/suffix — assumes
            # stackless Python's repr format.
            taskinfo = "%r" % stackless.getcurrent()
            eventDict['system'] = taskinfo[9:-2]
            log.FileLogObserver.emit(self, eventDict)

    fl = _(fout)
    log.startLoggingWithObserver(fl.emit)
for service in service_config.sections(): if 'Yes' == service_config.get(service, 'enabled'): [low_protocol, low_port] = service_config.get(service, 'low_port').split(':') [protocol, port] = service_config.get(service, 'port').split(':') if int(low_port) < 1024: ipt_file.write('./ipt_set_' + low_protocol + ' ' + low_port + ' ' + port + '\n') # set file permissin, close, and quit os.chmod(ipt_file_name, 0744) ipt_file.close() quit() log_file = DailyLogFile(log_file_name, log_path) file_log_observer = FileLogObserver(log_file) time_zone = subprocess.check_output(['date', '+%z']) file_log_observer.timeFormat = "%Y-%m-%d %H:%M:%S,%f," + time_zone.rstrip() # start logging log.startLoggingWithObserver(file_log_observer.emit, False) if 'Yes' == honeypy_config.get('twitter', 'enabled') or \ 'Yes' == honeypy_config.get('honeydb', 'enabled') or \ 'Yes' == honeypy_config.get('slack', 'enabled') or \ 'Yes' == honeypy_config.get('logstash', 'enabled') or \ 'Yes' == honeypy_config.get('elasticsearch', 'enabled'): # tail log file when reactor runs triageConfig(honeypy_config) tailer = lib.followtail.FollowTail(log_path + log_file_name)
def make_file_observer(path, log_level_name):
    """Return a level-filtering log observer that writes to a
    daily-rotated file at ``path``."""
    directory, base_name = os.path.split(path)
    daily_file = DailyLogFile(base_name, directory)
    return make_wrapped_observer(FileLogObserver(daily_file), log_level_name)
def __init__(self, interface, config):
    # Read every server option from ``config`` (dict-like with .get),
    # set up game mode and teams, wire the optional subsystems (ssh
    # console, IRC relay, status server, ban publish/subscribe, file
    # logging), then bind the underlying ServerProtocol.
    self.config = config
    if config.get('random_rotation', False):
        self.map_rotator_type = random_choice_cycle
    else:
        self.map_rotator_type = itertools.cycle
    self.default_time_limit = config.get('default_time_limit', 20.0)
    self.default_cap_limit = config.get('cap_limit', 10.0)
    self.advance_on_win = int(config.get('advance_on_win', False))
    self.win_count = itertools.count(1)
    self.bans = NetworkDict()
    # Missing bans.txt is fine on first run.
    try:
        self.bans.read_list(json.load(open('bans.txt', 'rb')))
    except IOError:
        pass
    self.hard_bans = set()  # possible DDoS'ers are added here
    self.player_memory = deque(maxlen=100)
    self.config = config
    if len(self.name) > MAX_SERVER_NAME_SIZE:
        print '(server name too long; it will be truncated to "%s")' % (
            self.name[:MAX_SERVER_NAME_SIZE])
    self.respawn_time = config.get('respawn_time', 8)
    self.respawn_waves = config.get('respawn_waves', False)
    game_mode = config.get('game_mode', 'ctf')
    if game_mode == 'ctf':
        self.game_mode = CTF_MODE
    elif game_mode == 'tc':
        self.game_mode = TC_MODE
    # NOTE(review): for any other mode name this reads self.game_mode
    # before it is assigned here — presumably a class-level default
    # exists; otherwise this raises AttributeError, not the intended
    # NotImplementedError.  Confirm against the class definition.
    elif self.game_mode is None:
        raise NotImplementedError('invalid game mode: %s' % game_mode)
    self.game_mode_name = game_mode
    team1 = config.get('team1', {})
    team2 = config.get('team2', {})
    self.team1_name = team1.get('name', 'Blue')
    self.team2_name = team2.get('name', 'Green')
    self.team1_color = tuple(team1.get('color', (0, 0, 196)))
    self.team2_color = tuple(team2.get('color', (0, 196, 0)))
    self.friendly_fire = config.get('friendly_fire', True)
    self.friendly_fire_time = config.get('grief_friendly_fire_time', 2.0)
    self.spade_teamkills_on_grief = config.get('spade_teamkills_on_grief',
                                               False)
    self.fall_damage = config.get('fall_damage', True)
    self.teamswitch_interval = config.get('teamswitch_interval', 0)
    self.max_players = config.get('max_players', 20)
    self.melee_damage = config.get('melee_damage', 100)
    self.max_connections_per_ip = config.get('max_connections_per_ip', 0)
    self.passwords = config.get('passwords', {})
    self.server_prefix = encode(config.get('server_prefix', '[*]'))
    self.time_announcements = config.get('time_announcements', [
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 30, 60, 120, 180, 240, 300, 600,
        900, 1200, 1800, 2400, 3000
    ])
    self.balanced_teams = config.get('balanced_teams', None)
    self.login_retries = config.get('login_retries', 1)
    # voting configuration
    self.default_ban_time = config.get('default_ban_duration', 24 * 60)
    self.speedhack_detect = config.get('speedhack_detect', True)
    if config.get('user_blocks_only', False):
        self.user_blocks = set()
    self.set_god_build = config.get('set_god_build', False)
    self.debug_log = config.get('debug_log', False)
    if self.debug_log:
        pyspades.debug.open_debug_log()
    # Optional subsystems, each gated on its own config section.
    ssh = config.get('ssh', {})
    if ssh.get('enabled', False):
        from ssh import RemoteConsole
        self.remote_console = RemoteConsole(self, ssh)
    irc = config.get('irc', {})
    if irc.get('enabled', False):
        from irc import IRCRelay
        self.irc_relay = IRCRelay(self, irc)
    status = config.get('status_server', {})
    if status.get('enabled', False):
        from statusserver import StatusServerFactory
        self.status_server = StatusServerFactory(self, status)
    publish = config.get('ban_publish', {})
    if publish.get('enabled', False):
        from banpublish import PublishServer
        self.ban_publish = PublishServer(self, publish)
    ban_subscribe = config.get('ban_subscribe', {})
    if ban_subscribe.get('enabled', True):
        import bansubscribe
        self.ban_manager = bansubscribe.BanManager(self, ban_subscribe)
    logfile = config.get('logfile', None)
    if logfile is not None and logfile.strip():
        if config.get('rotate_daily', False):
            create_filename_path(logfile)
            logging_file = DailyLogFile(logfile, '.')
        else:
            logging_file = open_create(logfile, 'a')
        log.addObserver(log.FileLogObserver(logging_file).emit)
        log.msg('pyspades server started on %s' % time.strftime('%c'))
    log.startLogging(sys.stdout)  # force twisted logging
    self.start_time = reactor.seconds()
    self.end_calls = []
    self.console = create_console(self)
    # Warn about default/blank admin passwords.
    for password in self.passwords.get('admin', []):
        if password == 'replaceme':
            print 'REMEMBER TO CHANGE THE DEFAULT ADMINISTRATOR PASSWORD!'
        elif not password:
            self.everyone_is_admin = True
    for user_type, func_names in config.get('rights', {}).iteritems():
        for func_name in func_names:
            commands.add_rights(func_name, user_type)
    port = self.port = config.get('port', 32887)
    ServerProtocol.__init__(self, port, interface)
    self.host.receiveCallback = self.receive_callback
    ret = self.set_map_rotation(config['maps'])
    if not ret:
        print 'Invalid map in map rotation (%s), exiting.' % ret.map
        raise SystemExit
    self.update_format()
    self.tip_frequency = config.get('tip_frequency', 0)
    if self.tips is not None and self.tip_frequency > 0:
        reactor.callLater(self.tip_frequency * 60, self.send_tip)
    self.master = config.get('master', True)
    self.set_master()
    get_external_ip(config.get('network_interface', '')).addCallback(
        self.got_external_ip)
iocpreactor.install() except: from twisted.internet import selectreactor selectreactor.install() from pubsub import PubProtocol from twisted.internet import reactor, protocol, endpoints from twisted.protocols import basic from twisted.internet.protocol import ReconnectingClientFactory from twisted.python import log from twisted.python.logfile import DailyLogFile from twisted.logger import Logger, textFileLogObserver logfile = DailyLogFile.fromFullPath("broker.log") log.startLogging(logfile) log = Logger(observer=textFileLogObserver(logfile)) class BrokerPubProtocol(PubProtocol): def __init__(self, factory, serializer=json): self.factory = factory self.uids = set() self.serializer = json def connectionLost(self, reason): self._clean_uids() self.factory.refresh_uids()
def getTextFileObserver(name="nucypher.log", path=USER_LOG_DIR):
    """Return a FileLogObserver that emits classic-format log text into
    a daily-rotated file in the user log directory (created if absent)."""
    _get_or_create_user_log_dir()
    return FileLogObserver(
        formatEvent=formatEventAsClassicLogText,
        outFile=DailyLogFile(name, path))
def log_to_dir(self, logdir):
    """Switch logging into ``logdir``: open a console.log there, reset
    any custom logs, and use the directory-based observer."""
    self.custom_logs = {}
    self.logdir = logdir
    self.console_logfile = DailyLogFile('console.log', logdir)
    self.observer = self.logdir_observer
def getJsonFileObserver(name="ursula.log.json", path=USER_LOG_DIR):
    """Return a JSON-lines log observer writing into a daily-rotated
    file in the user log directory (created if absent)."""
    # TODO: More configurable naming here?
    _get_or_create_user_log_dir()
    return jsonFileLogObserver(outFile=DailyLogFile(name, path))
import sys
import re

from sympy import sympify
from twisted.web.static import File
from twisted.python import log
from twisted.web.server import Site
from twisted.internet import reactor
from autobahn.twisted.websocket import WebSocketServerFactory, \
    WebSocketServerProtocol
from autobahn.twisted.resource import WebSocketResource
from twisted.python.logfile import DailyLogFile

log.startLogging(DailyLogFile.fromFullPath("server.log"))


class SomeServerProtocol(WebSocketServerProtocol):
    """WebSocket protocol that (de)registers itself with its factory."""

    def onOpen(self):
        # Registration is best-effort, but only ordinary errors are
        # swallowed: a bare ``except:`` would also hide SystemExit and
        # KeyboardInterrupt.
        try:
            self.factory.register(self)
        except Exception:
            pass

    def connectionLost(self, reason):
        try:
            self.factory.unregister(self)
        except Exception:
            pass
from twisted.python import log
from SharedLib.Interfaces import IMainSetup

# Resolve command-line/setup arguments.
args = IMainSetup("string")

# "-" combined with --ascii means: discard log output entirely.
if args.log == "-" and hasattr(args, "ascii") and args.ascii:
    args.log = '/dev/null'

# Empty, "-", "stdout" or "stdio" all select standard output; anything
# else is treated as a path for a daily-rotated log file.
if args.log in ("", "-", "stdout", "stdio"):
    import sys
    logTo = sys.stdout
else:
    from twisted.python.logfile import DailyLogFile
    logTo = DailyLogFile.fromFullPath(args.log)
log.startLogging(logTo)

from pprint import pformat
if args.debug or args.verbose:
    log.msg("Startup args:\n%s" % pformat(vars(args)))
from twisted.python import log from twisted.python.logfile import DailyLogFile from twisted.enterprise import adbapi from comm.comm import loglocal from sys import argv import binascii from CRC16.CRC16 import CRC16_1 import time import datetime import queue import pymysql import sys import pika import json log.startLogging(DailyLogFile.fromFullPath(loglocal)) """上报数据解析流程: A0 16 01 02 03 04 05 06 07 08 09 0A 0B 0C 01 04 00 0B 0A 01 F2 60 1、获取数据模型 按照uuid获取模型 2、获取模型对应的值 model_struct = { 'model_name':'khb_product_1', 'data_type' : { 'A0':{'02':'Float','03':'Float'}, 'A1':{'03':'Float','04':'UInt'} } 'updata_procotol':...., ....
# Pull connection settings out of the parsed config; the client name
# falls back to the (truncated) local hostname.
LOGBOT_CHANNEL = config["server"]["channel"]
LOGBOT_LOCATION = config["server"]["ip"]
LOGBOT_PORT = config["server"]["port"]
LOGBOT_NAME = config["server"]["name"] or platform.node().split(
    ".")[0][:15]
if not LOGBOT_NAME:
    print "Cannot detect a name for the client (usually means the system hostname cannot be detected)"
    sys.exit(1)
OPER_CREDENTIALS = (config["server"]["user"], config["server"]["password"])
# Per-OS tail binary location.
TAIL_LOCATION = config["logbot"]["tail_location"][
    platform.system().lower()]
# Channels explicitly configured on individual files.
OTHER_CHANNELS = [
    config["files"][x].get("channel", None) for x in config["files"]
    if config["files"][x].get("channel", None)
]
application = service.Application('LogBot')  #, uid=1, gid=1)
# Log to a daily-rotated logbot.log under ./logs.
if not os.path.exists("logs"):
    os.mkdir("logs")
logfile = DailyLogFile("logbot.log", "logs")
log_observer = FileLogObserver(logfile).emit
#application.setComponent(ILogObserver, log_observer)
log.addObserver(log_observer)
serviceCollection = service.IServiceCollection(application)
# Register every configured file with the tailing manager.
manager = FileManager(config, serviceCollection)
for item in config["files"]:
    manager.add_file(
        TailedFile(item, config["files"][item]["path"],
                   config["files"][item].get("channel", None)))
def connectionLost(self, why): print "connection lost:", why class XmlrpcHandler(cyclone.xmlrpc.XmlrpcRequestHandler): allowNone = True def xmlrpc_echo(self, text): return text try: raise Exception("COMMENT_THIS_LINE_AND_LOG_TO_DAILY_FILE") from twisted.python.logfile import DailyLogFile logFile = DailyLogFile.fromFullPath("server.log") print("Logging to daily log file: server.log") except Exception as e: import sys logFile = sys.stdout run( host="127.0.0.1", port=8888, log=logFile, debug=True, static_path="./static", template_path="./template", locale_path="./locale", login_url="/auth/login", cookie_secret="32oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=",
def suffix(self, tupledate):
    """Return the rotation suffix "YYYY_MM_DD" for ``tupledate``; fall
    back to the base-class suffix when fewer than three fields are
    supplied."""
    if len(tupledate) >= 3:
        year, month, day = tupledate[:3]
        return "{:04d}_{:02d}_{:02d}".format(year, month, day)
    # just in case — defensive fallback for malformed dates
    return DailyLogFile.suffix(self, tupledate)
__author__ = 'Fede M' import io from twisted.logger import Logger, textFileLogObserver from twisted.python import log from twisted.python.logfile import DailyLogFile logfile = DailyLogFile.fromFullPath("pubsub.log") log.startLogging(logfile) log = Logger(observer=textFileLogObserver(logfile)) # Code for PyInstaller import sys import uuid if 'twisted.internet.reactor' in sys.modules: del sys.modules['twisted.internet.reactor'] # Try to use the best reactor available try: from twisted.internet import epollreactor epollreactor.install() except: try: from twisted.internet import iocpreactor iocpreactor.install() except:
see https://twistedmatrix.com/documents/current/web/howto/using-twistedweb.html """ isLeaf = True # pylint: disable=unused-argument,no-self-use def render_GET(self, request): """ Return a simple html document """ return b'<html><body><h1>Serving</h1></body></html>' if __name__ == '__main__': log.startLogging(DailyLogFile.fromFullPath('/tmp/arscca-twisted.log')) root = Resource() factory = WebSocketServerFactory('ws://127.0.0.1:6544') factory.protocol = SomeServerProtocol resource = WebSocketResource(factory) # websockets resource on '/ws' path root.putChild(b'ws', resource) # status page on '/' path status_page = StatusPage() root.putChild(b'', status_page) watcher = Watcher(Dispatcher.file_updated)
log.err('process exit, error[%s]' % result.getErrorMessage()) self._work_d = None def _readstat(self, statfile): if self._work_d is None: self._start_work() if self._query_protocols and os.path.isfile(statfile): try: with open(statfile, 'rb') as f: self._work_stat = f.read() except Exception as err: self._work_stat = err.message log.err(err.message) out = [ 'run times: %d\n\n' % (self._restart_times), self._work_stat ] out = ''.join(out) for protocol in self._query_protocols: protocol.sendLine(out) # 创建log目录 if not os.path.isdir('./collectord_log'): os.mkdir('./collectord_log') application = service.Application('collectord') logfile = DailyLogFile('collectord.log', './collectord_log') application.setComponent(ILogObserver, FileLogObserver(logfile).emit) CollectorServices(31000).setServiceParent(application)
def close(self):
    """Close the underlying daily log file.

    Note: an earlier revision also released ``self._lock`` here; that
    call is commented out in the original and is deliberately not
    performed.
    """
    DailyLogFile.close(self)
# Aether imports from InputOutput import aetherProtocol from DecisionEngine import eventLoop from ORM import Demeter # Without this line, the networking process can't communicate with main on Windows when frozen. # Possibly also valid for OS X. Ignore PyCharm 'unused import' warning. if globals.userProfile.get( 'debugDetails', 'debugLogging' ): # Debug enabled. Keep print enabled, and route it to the logs. from twisted.python import log from twisted.python.logfile import DailyLogFile log.startLogging( DailyLogFile.fromFullPath(PROFILE_DIR + '/Logs/network.log')) globals.logSystemDetails() else: # Debug not enabled. Disable print def print(*a, **kwargs): pass if FROZEN: print('Networking Daemon: I am frozen.') else: print('Networking Daemon: I am thawed.') def main():
from twisted.application.service import Application
from twisted.python.log import ILogObserver, FileLogObserver
from twisted.python.logfile import DailyLogFile

# Twisted ".tac"-style application setup: twistd looks for a module-level
# object named `application` and runs it.
application = Application("Pollapli")
# Daily-rotated log file written to the current working directory.
logfile = DailyLogFile("pollapli.log", ".")#"/tmp")
# Route all Twisted log events for this application into the rotating file.
application.setComponent(ILogObserver, FileLogObserver(logfile).emit)
def __init__(self, *args, **kwargs): DailyLogFile.__init__(self, *args, **kwargs) # avoid circular dependencies from carbon.conf import settings self.enableRotation = settings.ENABLE_LOGROTATION
def apply_logging(self):
    """Wire the stdlib logging tree into Twisted's logging pipeline.

    Installs a Twisted FileLogObserver writing either to a daily-rotated
    file (when ``self.logger_dest`` is set) or to stdout otherwise, then
    bridges stdlib logging records into Twisted via a custom Handler.
    Finally applies per-logger overrides and the ``log_rpc`` /
    ``log_queries`` / ``log_results`` verbosity switches.
    """
    # We're using twisted logging only for IO.
    # NOTE(review): modern Twisted exposes these names from
    # 'twisted.logger', not 'twisted.python.logger' -- confirm this
    # import path against the Twisted version the project pins.
    from twisted.python.logger import FileLogObserver
    from twisted.python.logger import Logger, LogLevel, globalLogPublisher

    # stdlib level -> Twisted LogLevel translation used by TwistedHandler.
    LOGLEVEL_TWISTED_MAP = {
        logging.DEBUG: LogLevel.debug,
        logging.INFO: LogLevel.info,
        logging.WARN: LogLevel.warn,
        logging.ERROR: LogLevel.error,
        logging.CRITICAL: LogLevel.critical,
    }

    class TwistedHandler(logging.Handler):
        # Forwards every stdlib LogRecord to a Twisted Logger named
        # after the originating stdlib logger.
        def emit(self, record):
            assert isinstance(record, logging.LogRecord)
            Logger(record.name).emit(LOGLEVEL_TWISTED_MAP[record.levelno],
                                     log_text=self.format(record))

    if self.logger_dest is not None:
        # Destination configured: prefer a daily-rotated file; fall back
        # to a plain file if the directory is not read/writable.
        from twisted.python.logfile import DailyLogFile

        self.logger_dest = abspath(self.logger_dest)
        if access(dirname(self.logger_dest), os.R_OK | os.W_OK):
            log_dest = DailyLogFile.fromFullPath(self.logger_dest)
        else:
            Logger().warn("%r is not accessible. We need rwx on it to "
                          "rotate logs." % dirname(self.logger_dest))
            log_dest = open(self.logger_dest, 'wb+')
        formatter = logging.Formatter(self.LOGGING_PROD_FORMAT)
    else:
        # No destination: log to stdout, optionally colorized.
        formatter = logging.Formatter(self.LOGGING_DEVEL_FORMAT)
        log_dest = open('/dev/stdout', 'wb+')
        try:
            import colorama
            colorama.init()
            logger.debug("colorama loaded.")
        except Exception as e:
            logger.debug("coloarama not loaded: %r" % e)

    def record_as_string(record):
        # Render a Twisted log event dict to text, preferring the
        # bridged stdlib text, then a plain message, then a failure.
        if 'log_text' in record:
            return record['log_text'] + "\n"
        if 'message' in record:
            return record['message'] + "\n"
        if 'log_failure' in record:
            failure = record['log_failure']
            return "%s: %s" % (failure.type, pformat(vars(failure.value)))
        return pformat(record)

    observer = FileLogObserver(log_dest, record_as_string)
    globalLogPublisher.addObserver(observer)

    # Bridge stdlib logging -> Twisted for everything under the root.
    handler = TwistedHandler()
    handler.setFormatter(formatter)
    logging.getLogger().addHandler(handler)

    # Apply user-configured per-logger settings, if any.
    for l in self._loggers or []:
        l.apply()

    # Verbosity switches.
    if self.log_rpc or self.log_queries or self.log_results:
        logging.getLogger().setLevel(logging.DEBUG)
    if self.log_rpc:
        logging.getLogger('spyne.protocol').setLevel(logging.DEBUG)
        logging.getLogger('spyne.protocol.xml').setLevel(logging.DEBUG)
        logging.getLogger('spyne.protocol.dictdoc').setLevel(logging.DEBUG)
    if self.log_queries:
        logging.getLogger('sqlalchemy').setLevel(logging.INFO)
    if self.log_results:
        logging.getLogger('sqlalchemy').setLevel(logging.DEBUG)
def __init__(self, interface: bytes, config_dict: Dict[str, Any]) -> None:
    """Configure logging, load server settings, and start the protocol.

    Sets up, in order: log observers, map rotation, the ban list,
    game-mode and team options, optional sub-services (SSH console, IRC
    relay, status server, ban publish/subscribe), the underlying
    ServerProtocol on ``interface``, the initial map load, and periodic
    maintenance tasks (ban vacuuming, shutdown hook).
    """
    # logfile path relative to config dir if not abs path
    log_filename = logfile.get()
    if log_filename.strip():  # catches empty filename
        if not os.path.isabs(log_filename):
            log_filename = os.path.join(config.config_dir, log_filename)
        ensure_dir_exists(log_filename)
        if logging_rotate_daily.get():
            logging_file = DailyLogFile(log_filename, '.')
        else:
            logging_file = open(log_filename, 'a')
        # Mirror level-filtered log output to both stderr and the file.
        predicate = LogLevelFilterPredicate(
            LogLevel.levelWithName(loglevel.get()))
        observers = [
            FilteringLogObserver(textFileLogObserver(sys.stderr),
                                 [predicate]),
            FilteringLogObserver(textFileLogObserver(logging_file),
                                 [predicate])
        ]
        globalLogBeginner.beginLoggingTo(observers)
        log.info('piqueserver started on %s' % time.strftime('%c'))
    self.config = config_dict
    if random_rotation.get():
        self.map_rotator_type = random_choice_cycle
    else:
        self.map_rotator_type = itertools.cycle
    self.default_time_limit = default_time_limit.get()
    self.default_cap_limit = cap_limit.get()
    self.advance_on_win = int(advance_on_win.get())
    self.win_count = itertools.count(1)
    self.bans = NetworkDict()
    # attempt to load a saved bans list
    try:
        with open(os.path.join(config.config_dir,
                               bans_file.get()), 'r') as f:
            self.bans.read_list(json.load(f))
        log.debug("loaded {count} bans", count=len(self.bans))
    except FileNotFoundError:
        log.debug("skip loading bans: file unavailable",
                  count=len(self.bans))
    except IOError as e:
        log.error('Could not read bans.txt: {}'.format(e))
    except ValueError as e:
        log.error('Could not parse bans.txt: {}'.format(e))
    self.hard_bans = set()  # possible DDoS'ers are added here
    self.player_memory = deque(maxlen=100)
    if len(self.name) > MAX_SERVER_NAME_SIZE:
        log.warn('(server name too long; it will be truncated to "%s")' % (
            self.name[:MAX_SERVER_NAME_SIZE]))
    self.respawn_time = respawn_time_option.get()
    self.respawn_waves = respawn_waves.get()
    # since AoS only supports CTF and TC at a protocol level, we need to
    # get the base game mode if we are using a custom game mode.
    game_mode_name = game_mode.get()
    if game_mode_name == 'ctf':
        self.game_mode = CTF_MODE
    elif game_mode.get() == 'tc':
        self.game_mode = TC_MODE
    elif self.game_mode not in [CTF_MODE, TC_MODE]:
        # NOTE(review): this branch reads self.game_mode before this
        # __init__ assigns it -- presumably a custom game-mode script
        # has already set it on the instance/class; confirm.
        raise ValueError(
            'invalid game mode: custom game mode "{}" does not set '
            'protocol.game_mode to one of TC_MODE or CTF_MODE. Are '
            'you sure the thing you have specified is a game mode?'.format(
                game_mode_name))
    self.game_mode_name = game_mode.get().split('.')[-1]
    # Team names are truncated to 9 characters.
    self.team1_name = team1_name.get()[:9]
    self.team2_name = team2_name.get()[:9]
    self.team1_color = tuple(team1_color.get())
    self.team2_color = tuple(team2_color.get())
    self.friendly_fire = friendly_fire.get()
    self.friendly_fire_on_grief = friendly_fire_on_grief.get()
    self.friendly_fire_time = grief_friendly_fire_time.get()
    self.spade_teamkills_on_grief = spade_teamkills_on_grief.get()
    self.fall_damage = fall_damage.get()
    self.teamswitch_interval = teamswitch_interval.get()
    self.teamswitch_allowed = teamswitch_allowed.get()
    self.max_players = max_players.get()
    self.melee_damage = melee_damage.get()
    self.max_connections_per_ip = max_connections_per_ip.get()
    self.passwords = passwords.get()
    self.server_prefix = server_prefix.get()
    self.time_announcements = time_announcements.get()
    self.balanced_teams = balanced_teams.get()
    self.login_retries = login_retries.get()
    # voting configuration
    self.default_ban_time = default_ban_duration.get()
    self.speedhack_detect = speedhack_detect.get()
    self.rubberband_distance = rubberband_distance.get()
    if user_blocks_only.get():
        self.user_blocks = set()
    self.set_god_build = set_god_build.get()
    self.debug_log = debug_log_enabled.get()
    if self.debug_log:
        # TODO: make this configurable
        pyspades.debug.open_debug_log(
            os.path.join(config.config_dir, 'debug.log'))
    # Optional sub-services; imported lazily so disabled features carry
    # no import cost.
    if ssh_enabled.get():
        from piqueserver.ssh import RemoteConsole
        self.remote_console = RemoteConsole(self)
    irc = irc_options.get()
    if irc.get('enabled', False):
        from piqueserver.irc import IRCRelay
        self.irc_relay = IRCRelay(self, irc)
    if status_server_enabled.get():
        from piqueserver.statusserver import StatusServer
        self.status_server = StatusServer(self)
        ensureDeferred(self.status_server.listen())
    if ban_publish.get():
        from piqueserver.banpublish import PublishServer
        self.ban_publish = PublishServer(self, ban_publish_port.get())
    if bans_urls.get():
        from piqueserver import bansubscribe
        self.ban_manager = bansubscribe.BanManager(self)
    self.start_time = time.time()
    self.end_calls = []
    # TODO: why is this here?
    create_console(self)
    # Grant configured command rights per user type.
    for user_type, func_names in rights.get().items():
        for func_name in func_names:
            commands.add_rights(user_type, func_name)
    self.port = port_option.get()
    ServerProtocol.__init__(self, self.port, interface)
    self.host.intercept = self.receive_callback
    try:
        self.set_map_rotation(self.config['rotation'])
    except MapNotFound as e:
        log.critical('Invalid map in map rotation (%s), exiting.' % e.map)
        raise SystemExit
    map_load_d = self.advance_rotation()
    # discard the result of the map advance for now
    map_load_d.addCallback(lambda x: self._post_init())
    ip_getter = ip_getter_option.get()
    if ip_getter:
        ensureDeferred(as_deferred(self.get_external_ip(ip_getter)))
    self.new_release = None
    notify_new_releases = config.option("release_notifications",
                                        default=True)
    if notify_new_releases.get():
        ensureDeferred(as_deferred(self.watch_for_releases()))
    self.vacuum_loop = LoopingCall(self.vacuum_bans)
    # Run the vacuum every 6 hours, and kick it off it right now
    self.vacuum_loop.start(60 * 60 * 6, True)
    reactor.addSystemEventTrigger('before', 'shutdown',
                                  lambda: ensureDeferred(self.shutdown()))
def shouldRotate(self):
    """Gate rotation on the configured flag.

    Returns DailyLogFile's daily-rotation decision when rotation is
    enabled; otherwise always returns False so the file never rotates.
    """
    return DailyLogFile.shouldRotate(self) if self.enableRotation else False