class DummyLogFile(object):
    """Test double wrapping a real ``LogFile``.

    Counts ``close()`` calls instead of closing, and exposes all written
    lines through the ``logs`` property so tests can make assertions.
    """

    def __init__(self, worker_id, directory, rotateLength, maxRotatedFiles):
        self.worker_id = worker_id
        self.directory = directory
        self.rotateLength = rotateLength
        self.maxRotatedFiles = maxRotatedFiles
        self.closed_count = 0
        self.logfile = LogFile(
            worker_id, directory,
            rotateLength=rotateLength,
            maxRotatedFiles=maxRotatedFiles)
        self.path = self.logfile.path

    @property
    def logs(self):
        """All lines currently present in the underlying log file."""
        reader = self.logfile.getCurrentLog()
        collected = []
        chunk = reader.readLines()
        while chunk:
            collected.extend(chunk)
            chunk = reader.readLines()
        return collected

    def write(self, data):
        # Flush immediately so tests can read the data back synchronously.
        self.logfile.write(data)
        self.logfile.flush()

    def close(self):
        # Deliberately does not close the wrapped file; tests only assert
        # on how many times close() was invoked.
        self.closed_count += 1

    def listLogs(self):
        return []
def startService(self):
    """Open the worker's rotating log file and attach the log observer."""
    self.logfile = LogFile(
        self.worker_id, self.path,
        rotateLength=self.rotate,
        maxRotatedFiles=self.max_files)
    self.log_observer = JunebugLogObserver(self.logfile, self.worker_id)
    self.logger.addObserver(self.log_observer)
    return super(JunebugLoggerService, self).startService()
class DummyLogFile(object):
    """Dummy log file used for testing.

    Delegates writes to a real ``LogFile`` but replaces ``close()`` with a
    counter and exposes written content via the ``logs`` property.
    """

    def __init__(self, worker_id, directory, rotateLength, maxRotatedFiles):
        self.worker_id = worker_id
        self.directory = directory
        self.rotateLength = rotateLength
        self.maxRotatedFiles = maxRotatedFiles
        self.closed_count = 0
        self.logfile = LogFile(worker_id, directory,
                               rotateLength=rotateLength,
                               maxRotatedFiles=maxRotatedFiles)
        self.path = self.logfile.path

    @property
    def logs(self):
        """Return every line written to the current log so far."""
        reader = self.logfile.getCurrentLog()
        result = []
        batch = reader.readLines()
        while batch:
            result.extend(batch)
            batch = reader.readLines()
        return result

    def write(self, data):
        # Write-through with an immediate flush for synchronous reads.
        self.logfile.write(data)
        self.logfile.flush()

    def close(self):
        # Only record the call; the wrapped file stays open.
        self.closed_count += 1

    def listLogs(self):
        return []
def __init__(self, name, directory, rotateLength=1000000, defaultMode=None, maxRotatedFiles=None): LogFile.__init__(self, name, directory, rotateLength, defaultMode, maxRotatedFiles)
def startService(self):
    """Create the log directory if needed, open the rotating log file and
    attach the Junebug log observer."""
    if not os.path.exists(self.path):
        # 0o755 is the same mode as the old literal 0755, but this
        # spelling is valid on both Python 2.6+ and Python 3.
        os.makedirs(self.path, 0o755)
    self.logfile = LogFile(
        self.worker_id, self.path,
        rotateLength=self.rotate,
        maxRotatedFiles=self.max_files)
    self.log_observer = JunebugLogObserver(self.logfile, self.worker_id)
    self.logger.addObserver(self.log_observer)
    return super(JunebugLoggerService, self).startService()
def __init__(self, name, group):
    """Set up per-process rotating log files inside the group directory."""
    self.orgName = name
    # Keep only alphanumeric characters so the name is filesystem-safe.
    self.name = "".join(ch for ch in name if ch.isalnum())
    self.group = group
    # rotateLength=100000000 is ~100 MB per file (the old "#10M" comment
    # under-stated this by a factor of ten).
    self.logFile = LogFile(self.name + ".log", group.groupDir,
                           rotateLength=100000000, maxRotatedFiles=10)
    self.updateLogFile = LogFile(self.name + ".ulog", group.groupDir,
                                 rotateLength=100000000, maxRotatedFiles=5)
    self.ssLogFile = LogFile(self.name + ".slog", group.groupDir,
                             rotateLength=100000000, maxRotatedFiles=5)
    self.status = PROC_STATUS.STOP
    self.endTime = None
    self.startMemo = ''
def __init__(self, worker_id, directory, rotateLength, maxRotatedFiles):
    """Wrap a rotating ``LogFile`` for one worker and record its settings."""
    self.worker_id = worker_id
    self.directory = directory
    self.rotateLength = rotateLength
    self.maxRotatedFiles = maxRotatedFiles
    # Number of times close() has been requested (used by tests).
    self.closed_count = 0
    self.logfile = LogFile(worker_id, directory,
                           rotateLength=rotateLength,
                           maxRotatedFiles=maxRotatedFiles)
    self.path = self.logfile.path
def start():
    """Configure the web site, logging and plugins, then run the reactor."""
    root = static.File(os.environ['JPD_HTDOCS_PATH'])
    root.processors = {'.rpy': script.ResourceScript}
    root = rewrite.RewriterResource(
        root, rewrite.alias('cgi-bin/get_icon.py', 'get_icon.rpy'))
    site = JolicloudWSSite(root)
    site.addHandler('/jolicloud/', JolicloudWSHandler)

    # Setting up the log file path: system-wide when running as a system
    # daemon, otherwise a per-user XDG data directory.
    if os.environ.get('JPD_SYSTEM', '0') == '1':
        if os.getuid():
            log.err('You must be root to run this daemon in system mode.')
            exit()
        log_path = '/var/log'
    else:
        try:
            import xdg.BaseDirectory
            log_path = xdg.BaseDirectory.save_data_path(
                'Jolicloud', 'jolicloud-daemon')
        except ImportError:
            log_path = os.path.join(os.getenv('HOME'), '.local', 'share',
                                    'Jolicloud', 'jolicloud-daemon')

    port = int(os.environ.get(
        'JPD_PORT', 804 if os.environ.get('JPD_SYSTEM', None) else 8004))

    # http://twistedmatrix.com/documents/9.0.0/web/howto/using-twistedweb.html#auto5
    if os.environ.get('JPD_DEBUG', '0') == '1':
        log.startLogging(sys.stdout)
        log.startLogging(
            LogFile('jolicloud-daemon.log', log_path, maxRotatedFiles=2))
        reactor.listenTCP(port, site)
    else:
        log.startLogging(
            LogFile('jolicloud-daemon.log', log_path, maxRotatedFiles=2))
        # TODO, use random port for session daemon
        reactor.listenTCP(port, site, interface='127.0.0.1')

    # We load the plugins:
    if os.environ.get('JPD_SYSTEM', '0') == '1':
        log.msg('We load the system plugins.')
        plugins = getPlugins(ijolidaemon.ISystemManager, managers)
    else:
        log.msg('We load the session plugins.')
        plugins = getPlugins(ijolidaemon.ISessionManager, managers)
    for plugin in plugins:
        log.msg(plugin.__class__.__name__)

    reactor.run()
def get_json_file_observer(name=DEFAULT_JSON_LOG_FILENAME, path=USER_LOG_DIR):
    """Return a JSON log observer writing to a rotating file in *path*."""
    _ensure_dir_exists(path)
    logfile = LogFile(name=name, directory=path,
                      rotateLength=MAXIMUM_LOG_SIZE,
                      maxRotatedFiles=MAX_LOG_FILES)
    return jsonFileLogObserver(outFile=logfile)
def get_file():
    """Ensure the log directory exists, then open the rotating log file."""
    path.parent().makedirs(ignoreExistingDirectory=True)
    return LogFile(
        path.basename(),
        path.dirname(),
        rotateLength=rotate_length,
        maxRotatedFiles=max_rotated_files,
    )
def __init__(self, worker_id, path, rotateLength, maxRotatedFiles):
    """Open a rotating log file at *path* for the given worker."""
    self.worker_id = worker_id
    self.path = path
    self.rotateLength = rotateLength
    self.maxRotatedFiles = maxRotatedFiles
    # Number of close() calls observed (test bookkeeping).
    self.closed_count = 0
    self.logfile = LogFile.fromFullPath(
        path,
        rotateLength=rotateLength,
        maxRotatedFiles=maxRotatedFiles)
def __init__(self, nick, server, channel, port, loggingfile=None):
    """Create the bot factory; start file logging if a path is given."""
    self.server = server
    self.port = port
    if loggingfile is not None:
        log.startLogging(LogFile.fromFullPath(loggingfile))
    self.factory = ConversationBotFactory(self, ConversationBotClient,
                                          nick, channel)
def get_text_file_observer(name=DEFAULT_LOG_FILENAME, path=USER_LOG_DIR):
    """Return a classic-text log observer writing to a rotating file."""
    _ensure_dir_exists(path)
    logfile = LogFile(name=name, directory=path,
                      rotateLength=MAXIMUM_LOG_SIZE,
                      maxRotatedFiles=MAX_LOG_FILES)
    return FileLogObserver(formatEvent=formatEventAsClassicLogText,
                           outFile=logfile)
class JunebugLoggerService(Service):
    '''Service for :class:`junebug.logging.JunebugLogObserver`'''

    log_observer = None

    def __init__(self, worker_id, path, rotate, max_files, logger=None):
        '''
        Create the service for the Junebug Log Observer.

        :param str worker_id: ID of the worker to observe logs for.
        :param str path: Path to place the log files.
        :param int rotate: Size (in bytes) before rotating log file.
        :param int max_files:
            Maximum amount of log files before old log files start to get
            deleted.
        :param logger:
            logger to add observer to. Defaults to
            twisted.python.log.theLogPublisher
        :type logger: :class:`twisted.python.log.LogPublisher`
        '''
        self.setName('Junebug Worker Logger')
        self.logger = logger if logger is not None else log.theLogPublisher
        self.worker_id = worker_id
        self.path = path
        self.rotate = rotate
        self.max_files = max_files

    def startService(self):
        """Create the log directory if needed and attach the observer."""
        if not os.path.exists(self.path):
            # 0o755 has the same value as the old literal 0755, but the
            # spelling is valid on Python 2.6+ and Python 3 alike.
            os.makedirs(self.path, 0o755)
        self.logfile = LogFile(
            self.worker_id, self.path,
            rotateLength=self.rotate,
            maxRotatedFiles=self.max_files)
        self.log_observer = JunebugLogObserver(self.logfile, self.worker_id)
        self.logger.addObserver(self.log_observer)
        return super(JunebugLoggerService, self).startService()

    def stopService(self):
        """Detach the observer and close the log file if running."""
        if self.running:
            self.logger.removeObserver(self.log_observer)
            self.logfile.close()
        return super(JunebugLoggerService, self).stopService()

    def registered(self):
        """Return True when our observer is attached to the logger."""
        return self.log_observer in self.logger.observers
def _openLogFile(self, path): try: from twisted.python.logfile import LogFile log.msg("Setting up http.log rotating %s files of %s bytes each" % (maxRotatedFiles, rotateLength)) if hasattr(LogFile, "fromFullPath"): # not present in Twisted-2.5.0 return LogFile.fromFullPath(path, rotateLength=rotateLength, maxRotatedFiles=maxRotatedFiles) else: log.msg("WebStatus: rotated http logs are not supported on this version of Twisted") except ImportError, e: log.msg("WebStatus: Unable to set up rotating http.log: %s" % e)
def __init__(self, worker_id, directory, rotateLength, maxRotatedFiles):
    """Open a rotating worker log inside *directory*."""
    self.worker_id = worker_id
    self.directory = directory
    self.rotateLength = rotateLength
    self.maxRotatedFiles = maxRotatedFiles
    # Incremented by close(); the wrapped file is never really closed.
    self.closed_count = 0
    self.logfile = LogFile(
        worker_id, directory,
        rotateLength=rotateLength,
        maxRotatedFiles=maxRotatedFiles)
    self.path = self.logfile.path
def startService(self):
    """Start observing; log to a file unless filename is '-' (stdout)."""
    Service.startService(self)
    if self.filename != '-':
        self.logfile = LogFile.fromFullPath(
            self.filename, rotateLength=None, defaultMode=0o644)
        # Reopen the log on SIGUSR1 so external log rotation works.
        self.__previous_signal_handler = signal.signal(
            signal.SIGUSR1, self._signal_handler)
    else:
        self.logfile = sys.stdout
    self.observer = FileLogObserver(self.logfile)
    self.observer.start()
def startService(self):
    """Begin logging to the configured file, or to stdout for '-'."""
    Service.startService(self)
    if self.filename == '-':
        self.logfile = sys.stdout
    else:
        self.logfile = LogFile.fromFullPath(self.filename,
                                            rotateLength=None,
                                            defaultMode=0o644)
        # Let SIGUSR1 reopen the log so external rotation is possible.
        self.__previous_signal_handler = signal.signal(
            signal.SIGUSR1, self._signal_handler)
    self.observer = FileLogObserver(self.logfile)
    self.observer.start()
def __init__(self, logfilename):
    """Observe logs on stdout, or on a reopenable file when named."""
    if logfilename is None:
        logFile = sys.stdout
    else:
        logFile = LogFile.fromFullPath(logfilename, rotateLength=None)
        # Override if signal is set to None or SIG_DFL (0); install a
        # SIGUSR1 handler that reopens the file for external rotation.
        if not signal.getsignal(signal.SIGUSR1):
            def signalHandler(signum, frame):
                from twisted.internet import reactor
                reactor.callFromThread(logFile.reopen)
            signal.signal(signal.SIGUSR1, signalHandler)
    self.observer = log.FileLogObserver(logFile)
def setup_logging():
    """Configure the log observer from LOG_SETTINGS (file or stdout)."""
    filename = LOG_SETTINGS['file_path']
    if filename is not None:
        log_file = LogFile.fromFullPath(
            filename,
            rotateLength=LOG_SETTINGS['max_bytes'],
            maxRotatedFiles=LOG_SETTINGS['max_backups'])
    else:
        log_file = sys.stdout
    log_level = getattr(logging, LOG_SETTINGS['level'])
    observer = LevelFileLogObserver(log_file, log_level)
    observer.timeFormat = LOG_SETTINGS['time_format']
    observer.start()
def _parse_file(self, kind, args):
    """Return a destination factory: stdout for '-', otherwise a
    1 GiB rotating LogFile (10 rotated files) at the given path."""
    if args == "-":
        def get_file():
            return stdout
    else:
        path = FilePath(args)

        def get_file():
            return LogFile(
                path.basename(),
                path.dirname(),
                rotateLength=1024 * 1024 * 1024,
                maxRotatedFiles=10,
            )

    def make_destination(reactor):
        return FileDestination(get_file())
    return make_destination
def setup_logging():
    """Wire up the level-aware observer using LOG_SETTINGS."""
    filename = LOG_SETTINGS['file_path']
    if filename is None:
        log_file = sys.stdout
    else:
        log_file = LogFile.fromFullPath(
            filename,
            rotateLength=LOG_SETTINGS['max_bytes'],
            maxRotatedFiles=LOG_SETTINGS['max_backups'])
    log_level = getattr(logging, LOG_SETTINGS['level'])
    observer = LevelFileLogObserver(log_file, log_level)
    observer.timeFormat = LOG_SETTINGS['time_format']
    observer.start()
def opt_logfile(self, logfile_path):
    """
    Log to a file. Log is written to ``stdout`` by default.

    The logfile directory is created if it does not already exist.
    """
    logfile = FilePath(logfile_path)
    parent = logfile.parent()
    if not parent.exists():
        parent.makedirs()
    self['logfile'] = LogFile.fromFullPath(
        logfile.path,
        rotateLength=LOGFILE_LENGTH,
        maxRotatedFiles=LOGFILE_COUNT,
    )
def setServiceParent(self, app):
    """Attach to *app* and, if enabled, register a rotating error log."""
    MultiService.setServiceParent(self, app)
    if config.ErrorLogEnabled:
        errorLogFile = LogFile.fromFullPath(
            config.ErrorLogFile,
            rotateLength=config.ErrorLogRotateMB * 1024 * 1024,
            maxRotatedFiles=config.ErrorLogMaxRotatedFiles
        )
        # Registering ILogObserver with the Application object
        # gets our observer picked up within AppLogger.start( )
        app.setComponent(ILogObserver, FileLogObserver(errorLogFile).emit)
def msg(self, content=""):
    """Write *content* framed by START/END banners, prefixing every line
    with the running message counter, then advance the counter."""
    head = "==" * 10 + "START" + "==" * 10
    tail = "==" * 10 + "=END=" + "==" * 10
    LogFile.write(self, "\n%d:%s\n" % (self.Counter, head))
    for line in content.split('\n'):
        # Skip empty lines so the body stays compact.
        if line != "":
            LogFile.write(self, "%d:%s\n" % (self.Counter, line))
    LogFile.write(self, "%d:%s\n" % (self.Counter, tail))
    self.Counter += 1
def emmit(self, event):
    """Write a parsed log event to its per-location file, optionally
    forwarding it to the log service afterwards.

    NOTE(review): the name ``emmit`` looks like a typo for ``emit`` but is
    kept unchanged because external callers may reference it.
    """
    if event.get("isDebug", False) and is_production():
        return  # Don't log debug messages in production.
    location, prefix, log_time, log_time_str, entry_text = self.parse_from(event)
    folder, name = build_file(self.folder, location, self.extension)
    try:
        log = LogFile(name, folder)
        try:
            log.write("%s %s\n\t%s\n" % (
                prefix, log_time_str, entry_text.replace("\n", "\n\t\t")))
            log.flush()
        finally:
            # Close even when write/flush raises, so the handle never leaks
            # (previously an exception skipped close()).
            log.close()
    finally:
        if event.get("transmit", False) and not event.get("isDebug", False):
            self.service.write_entry(location, log_time, entry_text)
def create_log_file(filename, directory, max_size, max_files, twistd_user,
                    log_group):
    """Helper function to create twisted LogFiles and set file permissions.

    Change the log file permissions to match our service user if one is
    defined. This is needed so that the service can rotate the log files.
    """
    log_file = LogFile(filename, directory,
                       rotateLength=max_size,
                       maxRotatedFiles=max_files,
                       defaultMode=0o640,
                       data_type_text=True)
    if twistd_user is not None:
        uid, gid = _parse_user(twistd_user)
        if log_group:
            gid = _parse_group(log_group)
        os.chown(os.path.join(directory, filename), uid, gid)
    return log_file
def makeService(config):
    """Instantiate and prepare the application service for *config*."""
    # Install simple, blocking DNS resolver.
    from twisted.internet import reactor
    from twisted.internet.base import BlockingResolver
    reactor.installResolver(BlockingResolver())

    try:
        # Create dbus mainloop.
        import dbus.mainloop.glib
        dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
    except ImportError:
        warnings.warn('Failed to import the dbus module, some functionality might not work.')

    # Check if it is the right thing.
    if not hasattr(config.module, 'Application'):
        raise usage.UsageError("Invalid application module: " + config.appName)

    # Instantiate the main application.
    app = config.module.Application(config.appName, config.opts,
                                    config.appOpts)
    # Set quitflag.
    app.quitFlag = launcher.QuitFlag(app.path("temp").child("quitflag"))
    # Set the name.
    app.setName(config.appName)

    # Make sure the relevant paths exist.
    for kind in ["temp", "db"]:
        path = app.path(kind)
        if not path.exists():
            path.createDirectory()

    # Set up logging.
    logFile = app.path("logfile")
    if not logFile.parent().exists():
        logFile.parent().createDirectory()
    filename = app.path("logfile").path
    if config.opts['no-logrotate']:
        observer = RotatableFileLogObserver(filename)
    else:
        logFile = LogFile.fromFullPath(filename, maxRotatedFiles=9)
        observer = log.FileLogObserver(logFile).emit
    log.addObserver(observer)

    return app
def start(self, logfile=None, application_name="ooniprobe"):
    """Begin file and stdout logging, honouring the rotation settings."""
    from ooni.settings import config
    if not logfile:
        logfile = os.path.expanduser(config.basic.logfile)

    log_folder = os.path.dirname(logfile)
    if (not os.access(log_folder, os.W_OK) or
            (os.path.exists(logfile) and not os.access(logfile, os.W_OK))):
        # If we don't have permissions to write to the log_folder or
        # logfile, fall back to the running path.
        log_folder = config.running_path
        logfile = os.path.join(log_folder, "ooniprobe.log")

    self.log_filepath = logfile
    mkdir_p(log_folder)
    log_filename = os.path.basename(logfile)

    file_log_level = levels.get(config.basic.loglevel, levels['INFO'])
    stdout_log_level = levels['INFO']
    if config.advanced.debug:
        stdout_log_level = levels['DEBUG']

    if config.basic.rotate == 'daily':
        logfile = MyDailyLogFile(log_filename, log_folder)
    elif config.basic.rotate == 'length':
        logfile = LogFile(
            log_filename, log_folder,
            rotateLength=int(
                human_size_to_bytes(config.basic.rotate_length)),
            maxRotatedFiles=config.basic.max_rotated_files)
    else:
        logfile = open(os.path.join(log_folder, log_filename), 'a')

    self.fileObserver = MsecLogObserver(logfile, log_level=file_log_level)
    self.stdoutObserver = StdoutStderrObserver(sys.stdout,
                                               log_level=stdout_log_level)
    tw_log.startLoggingWithObserver(self.fileObserver.emit)
    tw_log.addObserver(self.stdoutObserver.emit)
    tw_log.msg("Starting %s on %s (%s UTC)" % (application_name,
                                               otime.prettyDateNow(),
                                               otime.prettyDateNowUTC()))
def postOptions(self):
    """Select the eliot destination: journald, stdout, or a rotating file."""
    if self["journald"]:
        destination = JournaldDestination()
    else:
        if self["logfile"] is None:
            logfile = self._sys_module.stdout
        else:
            logfilepath = FilePath(self["logfile"])
            parent_dir = logfilepath.parent()
            if not parent_dir.exists():
                parent_dir.makedirs()
            # A twisted.python.logfile which has write and flush methods
            # but which also rotates the log file.
            logfile = LogFile.fromFullPath(
                logfilepath.path,
                rotateLength=LOGFILE_LENGTH,
                maxRotatedFiles=LOGFILE_COUNT
            )
        destination = FileDestination(file=logfile)
    self.eliot_destination = destination
    original_postOptions(self)
def run(inst, port=None, logdir=None): """Start the ``twisted`` event loop, with an HTTP server. :param inst: a :py:class:`~smap.core.SmapInstance` which you want to server. :param int port: port to run on :rtype: none; this function does not return """ if not port: port = int(smapconf.SERVER.get('port', 8085)) if not logdir: logdir = smapconf.SERVER.get('logdir', os.getcwd()) if not os.path.exists(logdir): os.makedirs(logdir) print "Logging to", logdir print "Starting server on port", port # Allow 50 1MB files observer = log.FileLogObserver(LogFile('sMAP.log', logdir, rotateLength=1000000, maxRotatedFiles=50)) log.startLogging(observer) # Start server inst.start() reactor.listenTCP(port, getSite(inst)) reactor.run()
def postOptions(self):
    """Build self.eliot_destination from the parsed options."""
    if self['journald']:
        destination = JournaldDestination()
    elif self['logfile'] is None:
        destination = FileDestination(file=self._sys_module.stdout)
    else:
        logfilepath = FilePath(self['logfile'])
        logfilepath_directory = logfilepath.parent()
        if not logfilepath_directory.exists():
            logfilepath_directory.makedirs()
        # A twisted.python.logfile which has write and flush methods
        # but which also rotates the log file.
        logfile = LogFile.fromFullPath(
            logfilepath.path,
            rotateLength=LOGFILE_LENGTH,
            maxRotatedFiles=LOGFILE_COUNT,
        )
        destination = FileDestination(file=logfile)
    self.eliot_destination = destination
    original_postOptions(self)
def receive_logs(self, logs, transport):
    """Append each (time, location, msg) entry to its per-location file."""
    client_mac = self.__stations[transport.client].mac
    # Strip separator characters (Python 2 str.translate deletion form).
    client_mac = client_mac.translate(None, ":|/\\")
    open_files = {}
    for time, location, msg in logs:
        if location in open_files:
            log_file = open_files[location]
        else:
            loc = [client_mac] + list(location)
            folder, name = build_file(self.__output_folder, loc,
                                      self.__extension)
            log_file = LogFile(name, folder)
            open_files[location] = log_file
        # NOTE(review): time.strftime does not expand %f; the literal
        # "%f" likely passes through unexpanded -- confirm intent.
        log_time_str = strftime("%Y-%m-%d %H:%M:%S:%f", gmtime(time))
        log_file.write(">%s\n\t%s\n" % (log_time_str,
                                        msg.replace("\n", "\n\t\t")))
    # Now close those files.
    for log_file in open_files.itervalues():
        log_file.flush()
        log_file.close()
    return {"accepted": True}
def _parse_file(self, kind, arg_text):
    """Parse a file destination spec: '-' for stdout, otherwise
    path[,rotate_length=N][,max_rotated_files=N]."""
    # Reserve the possibility of an escape character in the future. \ is
    # the standard choice but it's the path separator on Windows which
    # pretty much ruins it in this context. Most other symbols already
    # have some shell-assigned meaning which makes them treacherous to use
    # in a CLI interface. Eliminating all such dangerous symbols leaves
    # approximately @.
    if u"@" in arg_text:
        raise ValueError(
            u"Unsupported escape character (@) in destination text ({!r})."
            .format(arg_text),
        )
    arg_list = arg_text.split(u",")
    path_name = arg_list.pop(0)
    if path_name == "-":
        def get_file():
            return stdout
    else:
        path = FilePath(path_name)
        rotate_length = int(self._get_arg(
            u"rotate_length",
            1024 * 1024 * 1024,
            arg_list,
        ))
        max_rotated_files = int(self._get_arg(
            u"max_rotated_files",
            10,
            arg_list,
        ))

        def get_file():
            return LogFile(
                path.basename(),
                path.dirname(),
                rotateLength=rotate_length,
                maxRotatedFiles=max_rotated_files,
            )
    return lambda reactor: FileDestination(get_file())
class LocalProcess(protocol.ProcessProtocol):
    """Process protocol that mirrors child output into rotating log files
    and records start/stop events in a start/stop (.slog) log."""

    def __init__(self, name, group):
        self.orgName = name
        # Keep only alphanumerics so the name is safe as a file name.
        self.name = "".join(ch for ch in name if ch.isalnum())
        self.group = group
        # rotateLength=100000000 is ~100 MB per file.
        self.logFile = LogFile(self.name + ".log", group.groupDir,
                               rotateLength=100000000, maxRotatedFiles=10)
        self.updateLogFile = LogFile(self.name + ".ulog", group.groupDir,
                                     rotateLength=100000000, maxRotatedFiles=5)
        self.ssLogFile = LogFile(self.name + ".slog", group.groupDir,
                                 rotateLength=100000000, maxRotatedFiles=5)
        self.status = PROC_STATUS.STOP
        self.endTime = None
        self.startMemo = ''

    def connectionMade(self):
        # todo add support startCompletion check
        self.status = PROC_STATUS.RUN
        self._ssLog("startTime", datetime.now().strftime(TIME_FORMAT),
                    self.startMemo)
        self.startMemo = ''
        global sendStatusFunc
        if sendStatusFunc:
            sendStatusFunc(self.group.name, self.orgName, self.status)

    def _writeLog(self, data):
        self.logFile.write("%s%s" % (data, CR))

    def _ssLog(self, stage, time, memo=''):
        self.ssLogFile.write("%s%s%s%s%s%s"
                             % (stage, SEP, time, SEP, memo, CR))

    def logUpdate(self, fname):
        _updateLog(self.updateLogFile, fname)

    def outReceived(self, data):
        self._writeLog(data)

    def errReceived(self, data):
        self._writeLog("[ERROR DATA %s]:%s"
                       % (datetime.now().strftime(TIME_FORMAT), data))

    def childDataReceived(self, childFD, data):
        self._writeLog(data)

    def inConnectionLost(self):
        pass

    def outConnectionLost(self):
        pass

    def errConnectionLost(self):
        pass

    def childConnectionLost(self, childFD):
        pass

    def isRunning(self):
        return self.status == PROC_STATUS.RUN

    def processExited(self, reason):
        pass

    def processEnded(self, reason):
        self.endTime = datetime.now()
        if reason.value.exitCode is None:
            self._ssLog("endInfo", "code is None,info:%s" % (reason))
        elif reason.value.exitCode != 0:
            self._ssLog("endInfo",
                        "code:%d,info:%s" % (reason.value.exitCode, reason))
        self.status = PROC_STATUS.STOP
        self._ssLog("endTime", self.endTime.strftime(TIME_FORMAT))
        global sendStatusFunc
        if sendStatusFunc:
            sendStatusFunc(self.group.name, self.orgName, self.status)
        # NOTE(review): in the flattened original these closes follow the
        # status callback directly; assumed to belong to processEnded --
        # confirm against the upstream source.
        self.logFile.close()
        self.updateLogFile.close()
        self.ssLogFile.close()

    def signal(self, signalName):
        self.transport.signalProcess(signalName.name)
def initialize_twisted_logging(self):
    """Start Twisted logging to a rotating file (10 files x 20 MB)."""
    twenty_megabytes = 20000000
    log_file = LogFile.fromFullPath(
        self.configuration['log_filename'],
        maxRotatedFiles=10,
        rotateLength=twenty_megabytes)
    log.startLogging(log_file)
wsgi_root = wsgi_resource()
root = twresource.Root(wsgi_root)

# Serve Django media files off of /media:
staticrsrc = static.File(os.path.join(DJANGO_PROJECT_PATH, "Weblvn/media"))
root.putChild("media", staticrsrc)

# The cool part! Add in pure Twisted Web Resouce in the mix
# This 'pure twisted' code could be using twisted's XMPP functionality, etc:
#root.putChild("google", twresource.GoogleResource())

# my add (from buildbot runner.py): rotating twistd log
rotateLength = 10000000
maxRotatedFiles = 5
try:
    from twisted.python.logfile import LogFile
    from twisted.python.log import ILogObserver, FileLogObserver
    logfile = LogFile.fromFullPath(TWISTED_LOG,
                                   rotateLength=rotateLength,
                                   maxRotatedFiles=maxRotatedFiles)
    application.setComponent(ILogObserver, FileLogObserver(logfile).emit)
except ImportError:
    # probably not yet twisted 8.2.0 and beyond, can't set log yet
    pass

# Serve it up:
main_site = server.Site(root)
internet.TCPServer(PORT, main_site).setServiceParent(application)
def create_logfile(self):
    """Create and return a LogFile backed by a temporary path."""
    temp_path = self.mktemp()
    return LogFile.fromFullPath(temp_path)
def __init__(self, serverPushCb, queue=None, path=None, filter=True,
             bufferDelay=1, retryDelay=5, blackList=None, filterFunc=None):
    """
    @serverPushCb: callback to be used. It receives 'self' as parameter.
        It should call self.queueNextServerPush() when it's done to queue
        the next push. It is guaranteed that the queue is not empty when
        this function is called.
    @queue: a item queue that implements IQueue.
    @path: path to save config.
    @filter: when True (default), removes all "", None, False, [] or {}
        entries.
    @bufferDelay: amount of time events are queued before sending, to
        reduce the number of push requests rate. This is the delay between
        the end of a request to initializing a new one.
    @retryDelay: amount of time between retries when no items were pushed
        on last serverPushCb call.
    @blackList: events that shouldn't be sent.
    @filterFunc: optional function applied to items added to packet payload
    """
    StatusReceiverMultiService.__init__(self)

    # Parameters.
    self.queue = queue
    if self.queue is None:
        self.queue = MemoryQueue()
    self.queue = IndexedQueue(self.queue)
    self.path = path
    self.filter = filter
    self.bufferDelay = bufferDelay
    self.retryDelay = retryDelay
    if not callable(serverPushCb):
        raise NotImplementedError('Please pass serverPushCb parameter.')

    def hookPushCb():
        # Update the index so we know if the next push succeed or not, don't
        # update the value when the queue is empty.
        if not self.queue.nbItems():
            return
        self.lastIndex = self.queue.getIndex()
        return serverPushCb(self)
    self.serverPushCb = hookPushCb
    self.blackList = blackList
    self.filterFunc = filterFunc

    # Other defaults.
    # IDelayedCall object that represents the next queued push.
    self.task = None
    self.stopped = False
    self.lastIndex = -1
    self.state = {}
    self.state['started'] = str(datetime.datetime.utcnow())
    self.state['next_id'] = 1
    self.state['last_id_pushed'] = 0

    # Try to load back the state.
    if self.path and os.path.isdir(self.path):
        state_path = os.path.join(self.path, 'state')
        if os.path.isfile(state_path):
            self.state.update(json.load(open(state_path, 'r')))

    if self.queue.nbItems():
        # Last shutdown was not clean, don't wait to send events.
        self.queueNextServerPush()

    self.verboseLog = LogFile.fromFullPath(
        'status_push.log', rotateLength=10*1024*1024, maxRotatedFiles=14)
def __init__(self, serverPushCb, queue=None, path=None, filter=True,
             bufferDelay=1, retryDelay=5, blackList=None, filterFunc=None):
    """
    @serverPushCb: callback to be used. It receives 'self' as parameter.
        It should call self.queueNextServerPush() when it's done to queue
        the next push. It is guaranteed that the queue is not empty when
        this function is called.
    @queue: a item queue that implements IQueue.
    @path: path to save config.
    @filter: when True (default), removes all "", None, False, [] or {}
        entries.
    @bufferDelay: amount of time events are queued before sending, to
        reduce the number of push requests rate. This is the delay between
        the end of a request to initializing a new one.
    @retryDelay: amount of time between retries when no items were pushed
        on last serverPushCb call.
    @blackList: events that shouldn't be sent.
    @filterFunc: optional function applied to items added to packet payload
    """
    StatusReceiverMultiService.__init__(self)

    # Queue setup: default to an in-memory queue, always index it.
    if queue is None:
        queue = MemoryQueue()
    self.queue = IndexedQueue(queue)
    self.path = path
    self.filter = filter
    self.bufferDelay = bufferDelay
    self.retryDelay = retryDelay
    if not callable(serverPushCb):
        raise NotImplementedError('Please pass serverPushCb parameter.')

    def hookPushCb():
        # Update the index so we know if the next push succeed or not, don't
        # update the value when the queue is empty.
        if not self.queue.nbItems():
            return
        self.lastIndex = self.queue.getIndex()
        return serverPushCb(self)
    self.serverPushCb = hookPushCb
    self.blackList = blackList
    self.filterFunc = filterFunc

    # Other defaults.
    # IDelayedCall object that represents the next queued push.
    self.task = None
    self.stopped = False
    self.lastIndex = -1
    self.state = {
        'started': str(datetime.datetime.utcnow()),
        'next_id': 1,
        'last_id_pushed': 0,
    }

    # Try to load back the state.
    if self.path and os.path.isdir(self.path):
        state_path = os.path.join(self.path, 'state')
        if os.path.isfile(state_path):
            self.state.update(json.load(open(state_path, 'r')))

    if self.queue.nbItems():
        # Last shutdown was not clean, don't wait to send events.
        self.queueNextServerPush()

    self.verboseLog = LogFile.fromFullPath('status_push.log',
                                           rotateLength=10 * 1024 * 1024,
                                           maxRotatedFiles=14)
from twisted.protocols.amp import AMP, Command, Integer, String
from twisted.python.usage import Options, UsageError
from twisted.internet import reactor, task, utils
from twisted.internet.protocol import ServerFactory
import sys,re
from twisted.application import service, internet
from simplebb.server import main

# Log rotation settings for the twistd log.
rotateLength = 1000000
maxRotatedFiles = 5

from twisted.python.logfile import LogFile
from twisted.python.log import ILogObserver, FileLogObserver

application = service.Application("My app")
logfile = LogFile.fromFullPath("server.log",
                               rotateLength=rotateLength,
                               maxRotatedFiles=maxRotatedFiles)
application.setComponent(ILogObserver, FileLogObserver(logfile).emit)
# Stable process name for the daemon.
service.IProcess(application).processName = "simplebbsd"

s = main(use_tac=True)
s.setServiceParent(application)
def _openLogFile(self, path):
    """Open a rotating LogFile at *path* using the configured limits."""
    return LogFile.fromFullPath(path,
                                rotateLength=self.rotateLength,
                                maxRotatedFiles=self.maxRotatedFiles)
def _openLogFile(self, path):
    """Open a rotating LogFile, marking output for native conversion."""
    self._nativeize = True
    return LogFile.fromFullPath(path,
                                rotateLength=self.rotateLength,
                                maxRotatedFiles=self.maxRotatedFiles)
#conn.create_tags(['r-75625877'], {'Name':'aaaaaaaaa'})
def request_instances():
    """Launch a single t1.micro instance from the fixed AMI."""
    return conn.run_instances(
        "ami-a579efa4",
        min_count=1,
        max_count=1,
        #key_name='favbuykey',
        #security_groups=['sg-5d0b7d5c'],
        security_group_ids=['sg-5d0b7d5c'],
        #instance_profile_name = "aa",
        instance_type="t1.micro",
        #user_data=get_init_script(*(NUMS.get(itype, (10, 10))),burst=burst)
    )

if __name__ == "__main__":
    # Run directly: log to stdout and drive the reactor ourselves.
    log.startLogging(sys.stdout)
    reactor.callWhenRunning(start)
    reactor.run()

if __name__ == "__builtin__":
    # Loaded by twistd as a .tac file: log to a rotating file instead.
    from twisted.python.log import ILogObserver, FileLogObserver
    from twisted.python.logfile import DailyLogFile, LogFile
    reactor.callWhenRunning(start)
    application = service.Application('ec2_schd')
    logfile = LogFile("ec2_schd.log", "/var/log/",
                      rotateLength=100000000000)
    application.setComponent(ILogObserver, FileLogObserver(logfile).emit)
def utilityMain( configFileName, serviceClass, reactor=None, serviceMaker=None, patchConfig=None, onShutdown=None, verbose=False, loadTimezones=False, ): """ Shared main-point for utilities. This function will: - Load the configuration file named by C{configFileName}, - launch a L{CalDAVServiceMaker}'s with the C{ProcessType} of C{"Utility"} - run the reactor, with start/stop events hooked up to the service's C{startService}/C{stopService} methods. It is C{serviceClass}'s responsibility to stop the reactor when it's complete. @param configFileName: the name of the configuration file to load. @type configuration: C{str} @param serviceClass: a 1-argument callable which takes an object that provides L{ICalendarStore} and/or L{IAddressbookStore} and returns an L{IService}. @param patchConfig: a 1-argument callable which takes a config object and makes and changes necessary for the tool. @param onShutdown: a 0-argument callable which will run on shutdown. @param reactor: if specified, the L{IReactorTime} / L{IReactorThreads} / L{IReactorTCP} (etc) provider to use. If C{None}, the default reactor will be imported and used. 
""" from calendarserver.tap.caldav import CalDAVServiceMaker, CalDAVOptions if serviceMaker is None: serviceMaker = CalDAVServiceMaker # We want to validate that the actual service is always an instance of WorkerService, so wrap the # service maker callback inside a function that does that check def _makeValidService(store): service = serviceClass(store) assert isinstance(service, WorkerService) return service # Install std i/o observer if verbose: observer = StandardIOObserver() observer.start() if reactor is None: from twisted.internet import reactor try: config = loadConfig(configFileName) if patchConfig is not None: patchConfig(config) checkDirectories(config) utilityLogFile = LogFile.fromFullPath( config.UtilityLogFile, rotateLength=config.ErrorLogRotateMB * 1024 * 1024, maxRotatedFiles=config.ErrorLogMaxRotatedFiles, ) utilityLogObserver = FileLogObserver(utilityLogFile) utilityLogObserver.start() config.ProcessType = "Utility" config.UtilityServiceClass = _makeValidService autoDisableMemcached(config) maker = serviceMaker() # Only perform post-import duties if someone has explicitly said to maker.doPostImport = getattr(maker, "doPostImport", False) options = CalDAVOptions service = maker.makeService(options) reactor.addSystemEventTrigger("during", "startup", service.startService) reactor.addSystemEventTrigger("before", "shutdown", service.stopService) if onShutdown is not None: reactor.addSystemEventTrigger("before", "shutdown", onShutdown) if loadTimezones: TimezoneCache.create() except (ConfigurationError, OSError), e: sys.stderr.write("Error: %s\n" % (e,)) return
from twisted.python import log from twisted.web import static, server from twisted.web.resource import Resource from twisted.python.logfile import LogFile try: from autobahn.wamp import WampServerFactory except ImportError: # autobahn 0.8.0+ from autobahn.wamp1.protocol import WampServerFactory sys.path.append("/etc/yadtbroadcast-server") from broadcastserverconfig import LOG_FILE, CACHE_FILE, WS_PORT, DOCROOT_DIR, HTTP_PORT log.startLogging(LogFile.fromFullPath(LOG_FILE)) import yadtbroadcastserver try: os.makedirs(os.path.dirname(CACHE_FILE)) except exceptions.OSError, e: if e.errno != 17: log.err() try: os.makedirs(os.path.dirname(LOG_FILE)) except exceptions.OSError, e: if e.errno != 17: log.err() # TODO refactor: use util method in ws lib for url creation
from twisted.application import service
from twisted.python.log import ILogObserver
from twisted.python.logfile import LogFile

from tx_logging.observers import LevelFileLogObserver

from commander.service import RootService
from commander.settings import COMMANDER_LOG as LOG_SETTINGS

# Init logging ----------------------------------------------------------------
filename = LOG_SETTINGS['filename']
if filename is not None:
    log_file = LogFile.fromFullPath(
        filename,
        rotateLength=LOG_SETTINGS['maxBytes'],
        maxRotatedFiles=LOG_SETTINGS['backupCount'])
else:
    log_file = sys.stdout
log_level = getattr(logging, LOG_SETTINGS['level'])
observer = LevelFileLogObserver(log_file, log_level)
observer.timeFormat = LOG_SETTINGS['timeFormat']

# Init application ------------------------------------------------------------
application = service.Application("IL-2 Events Commander")
application.setComponent(ILogObserver, observer.emit)

# Init commander service ------------------------------------------------------
def __init__(self, name, directory):
    """Open the rotating log and start the per-message counter at 1."""
    self.Counter = 1
    LogFile.__init__(self, name, directory, maxRotatedFiles=10)
# NOTE(review): this chunk begins inside a triple-quoted (commented-out)
# block whose opening quotes are outside the visible source (the leading
# "syslog_conf ... init_logger(...)" fragment is dead text closed by the
# ''' below). Only the code after ''' is live; left byte-identical because
# the head cannot be reconstructed safely from here.
syslog_conf = (syslog_conf[0], int(syslog_conf[1])) else: syslog_conf = syslog_conf[0] init_logger( syslog_conf ) ''' from server import Server from datetime import datetime server = Server() reactor.addSystemEventTrigger('before', 'shutdown', server.cleanup) application = service.Application(SERVER_NAME) log_path = config.log_path log_rotate = int(config.log_rotate_interval) logfile = LogFile('message.log', log_path, rotateLength=log_rotate) logOb = FileLogObserver(logfile) logOb.formatTime = lambda when: datetime.fromtimestamp(when).strftime( '%m/%d %T.%f') application.setComponent(ILogObserver, logOb.emit) internet.TCPServer(config.port, server, interface=config.interface).setServiceParent( service.IServiceCollection(application)) internet.TCPServer(config.adminport, ShellFactory(), interface=config.interface).setServiceParent( service.IServiceCollection(application))
def _openLogFile(self, path):
    """Open a rotating LogFile; flag output for native newline handling."""
    self._nativeize = True
    return LogFile.fromFullPath(
        path,
        rotateLength=self.rotateLength,
        maxRotatedFiles=self.maxRotatedFiles)
# NOTE(review): the first two statements ("print \"registered client...\"" and
# "self.clients.append(client)") belong to a register() method whose `def`
# line was cut off before this chunk; left byte-identical because the
# missing method header cannot be reconstructed safely from here.
print "registered client " + client.peerstr self.clients.append(client) def unregister(self, client): if client in self.clients: print "unregistered client " + client.peerstr self.clients.remove(client) def start(): factory = StoreServerFactory("ws://localhost:9000", debugWamp=True) factory.protocol = RepeaterServerProtocol factory.setProtocolOptions(allowHixie76=True) listenWS(factory) if __name__ == "__main__": log.startLogging(sys.stdout) reactor.callWhenRunning(start) reactor.run() if __name__ == "__builtin__": from twisted.python.log import ILogObserver, FileLogObserver from twisted.python.logfile import DailyLogFile, LogFile reactor.callWhenRunning(start) application = service.Application('server_admin') logfile = LogFile("server_admin.log", "/var/log/", rotateLength=100000000000) application.setComponent(ILogObserver, FileLogObserver(logfile).emit)