def stage_configure(self):
    """Configure the agency: chdir when daemonizing, create the database
    driver and the journaler, and splice the journaler into the log tee."""
    self.c = self.friend.config
    if self.c.agency.daemonize:
        os.chdir(self.c.agency.rundir)
    dbc = self.c.db
    assert isinstance(dbc, config.DbConfig), str(type(dbc))
    self._db = driver.Database(dbc.host, int(dbc.port), dbc.name)
    self._journaler = journaler.Journaler(
        on_rotate_cb=self.friend._force_snapshot_agents,
        on_switch_writer_cb=self.friend._on_journal_writer_switch,
        hostname=self.friend.get_hostname())
    # add the journaler to the LogTee which is the default keeper
    # dump the buffer with entries so far and remove it from the tee
    # at this point in future if we decide not to log to text files
    # we should remove the 'flulog' keeper from the tee as well
    tee = log.get_default()
    # FIXME: get_keeper is not an ILogWhatever method, only Tee has it
    try:
        buff = tee.get_keeper('buffer')
        buff.dump(self._journaler)
        buff.clean()
        tee.remove_keeper('buffer')
        tee.add_keeper('journaler', self._journaler)
    except AttributeError:
        # best-effort: the default keeper was not a Tee after all
        self.warning('Programmer error, interface disrespect')
def __init__(self, host, port=None, protocol=None, security_policy=None,
             logger=None, reactor=None):
    """HTTP(S) connection wrapper.

    Falls back to the default log keeper, derives the scheme from the
    security policy and fills in the standard port when none is given.
    """
    keeper = logger or log.get_default() or log.FluLogKeeper()
    log.LogProxy.__init__(self, keeper)
    log.Logger.__init__(self, keeper)
    self.reactor = reactor or treactor
    self._host = host
    self._port = port
    self._security_policy = security.ensure_policy(security_policy)
    # the scheme follows directly from the security policy
    self._http_scheme = (http.Schemes.HTTPS
                         if self._security_policy.use_ssl
                         else http.Schemes.HTTP)
    if self._port is None:
        # default to the well-known port of the chosen scheme
        if self._http_scheme is http.Schemes.HTTPS:
            self._port = 443
        elif self._http_scheme is http.Schemes.HTTP:
            self._port = 80
    if protocol is None:
        self._http_protocol = self.default_http_protocol
    else:
        self._http_protocol = protocol
    self._protocol = None
    self._pending = 0
    self.log_name = '%s:%d (%s)' % (
        self._host, self._port, self._http_scheme.name)
def __init__(self, host, port, db_name, username=None, password=None,
             https=False):
    """CouchDB connection: sets up change listening, caching and
    document-lock bookkeeping, then configures the connection."""
    common.ConnectionManager.__init__(self)
    log.LogProxy.__init__(self, log.get_default() or log.FluLogKeeper())
    ChangeListener.__init__(self, self)
    # connection parameters are filled in by _configure() below
    self.couchdb = None
    self.db_name = None
    self.version = None
    self.host = None
    self.port = None
    self.https = None
    # name -> Notifier
    self.notifiers = dict()
    # this flag prevents the reconnector from being spawned
    self.disconnected = False
    self.retry = 0
    self.reconnector = None
    # doc_id -> list of tuples (Filter, rev, deleted)
    # The list is added when we start modifying the document,
    # all the notifications received in the meantime will be
    # stored in this hash until the change is done; this solves
    # the problem caused by a change notification received
    # before the http request modifying the document is finished
    self._pending_notifications = dict()
    # doc_id -> C{int} number of locks
    self._document_locks = dict()
    self._cache = Cache(desired_size=self.DESIRED_CACHE_SIZE)
    self._configure(host, port, db_name, username, password, https)
def stage_internals(self):
    """Detach the journaler keeper from the default log tee (if it is
    still registered) and disconnect the agency internals."""
    default_tee = log.get_default()
    try:
        default_tee.remove_keeper('journaler')
    except KeyError:
        # the keeper was never added or has already been removed
        pass
    return self.friend._disconnect()
def __init__(self, hostname, port, server_name='', log_keeper=None,
             prefix=None, interface='', apiprefix=None,
             thread_stats_file=None, **kwargs):
    """Web server wired with a thread pool and optional thread statistics."""
    self.hostname = hostname
    self._prefix = prefix
    # fall back to the hostname when no explicit server name is given
    server_name = server_name or hostname
    if log_keeper is None:
        log_keeper = log.get_default()
    if thread_stats_file:
        self.thread_stats = stats.Statistics(log_keeper, thread_stats_file)
    else:
        self.thread_stats = None
    self.threadpool = threadpool.ThreadPool(logger=log_keeper,
                                            init_thread=self._init_thread,
                                            statistics=self.thread_stats)
    self.res = Root(self, server_name, prefix=prefix, apiprefix=apiprefix)
    webserver.Server.__init__(self, port, self.res, log_keeper=log_keeper,
                              interface=interface, **kwargs)
def __init__(self, jourfile=None, tunneling_version=None,
             tunneling_bridge=None):
    """Simulation driver: in-memory messaging/database emulators plus an
    optional sqlite journal writer.

    @param jourfile: sqlite journal filename; in-memory when not given
    """
    log_keeper = log.get_default() or log.FluLogKeeper()
    log.LogProxy.__init__(self, log_keeper)
    log.Logger.__init__(self, self)
    Commands.__init__(self)
    self._messaging = emu.RabbitMQ()
    self._tunneling_version = tunneling_version
    self._tunneling_bridge = tunneling_bridge or tunneling.Bridge()
    self._database = database.Database()
    jouropts = dict()
    if jourfile:
        jouropts['filename'] = jourfile
        jouropts['encoding'] = 'zip'
    self._jourwriter = journaler.SqliteWriter(self, **jouropts)
    self._journaler = journaler.Journaler()
    self._output = Output()
    self._parser = manhole.Parser(self, self._output, self,
                                  self.finished_processing)
    self._agencies = list()
    self._breakpoints = dict()
    self._dependency_references = list()
    # uuid replacement for host agents
    self._counter = 0
def __init__(self, root, port_range=None, hostname=None, static_path=None,
             security_policy=None, log_keeper=None, label=None,
             web_statistics=None, interface=''):
    """Gateway server listening on a port taken from *port_range*.

    @param port_range: a single C{int} port or a C{(min_port, max_port)}
        pair; anything else raises ValueError
    @raise ValueError: on an invalid port/range specification
    """
    log.Logger.__init__(self, self)
    log.LogProxy.__init__(self, log_keeper or log.get_default())
    self._root = root
    self._label = label
    if not static_path:
        from feat.configure import configure
        static_path = configure.gatewaydir
    self._static_path = static_path
    tmp_range = port_range
    if isinstance(tmp_range, int):
        # a single port is treated as a degenerate range
        tmp_range = (tmp_range, tmp_range)
    try:
        min_port, max_port = tmp_range
        if not (isinstance(min_port, int) and
                isinstance(max_port, int) and
                min_port <= max_port):
            raise ValueError()
        self._ports = (min_port, max_port)
    except (TypeError, ValueError):
        # TypeError covers port_range=None (the default) or any other
        # non-iterable, which previously escaped as a bare TypeError
        # instead of this descriptive ValueError
        raise ValueError("Invalid gateway port/range specified: %r"
                         % (port_range, ))
    self._host = hostname or socket.gethostbyaddr(socket.gethostname())[0]
    self._security = security.ensure_policy(security_policy)
    self._server = None
    self._interface = interface
    self._statistics = (web_statistics and
                        webserver.IWebStatistics(web_statistics))
def __init__(self, journal, agent_id, inject_dummy_externals=False):
    """Journal replay driver for a single agent.

    Registers dummy factories for every medium/protocol type so journal
    entries can be unserialized without a live agency.
    """
    log_keeper = log.get_default() or log.FluLogKeeper()
    log.LogProxy.__init__(self, log_keeper)
    log.Logger.__init__(self, self)
    self.journal = journal
    # both serializers use this object as their externalizer
    self.unserializer = banana.Unserializer(externalizer=self)
    self.serializer = banana.Serializer(externalizer=self)
    self.inject_dummy_externals = inject_dummy_externals
    self.agent_type = None
    self.agent_id = agent_id
    Factory(self, "agent-medium", AgencyAgent)
    Factory(self, "db-connection", Connection)
    Factory(self, "replier-medium", AgencyReplier)
    Factory(self, "requester-medium", AgencyRequester)
    Factory(self, "contractor-medium", AgencyContractor)
    Factory(self, "manager-medium", AgencyManager)
    Factory(self, "retrying-protocol", RetryingProtocol)
    Factory(self, "periodic-protocol", PeriodicProtocol)
    Factory(self, "task-medium", AgencyTask)
    Factory(self, "collector-medium", AgencyCollector)
    Factory(self, "poster-medium", AgencyPoster)
    self.reset()
def __init__(self, host, port, db_name):
    """Paisley (CouchDB) connection with change-notification bookkeeping."""
    common.ConnectionManager.__init__(self)
    log.LogProxy.__init__(self, log.get_default() or log.FluLogKeeper())
    ChangeListener.__init__(self, self)
    # connection parameters are filled in by _configure() below
    self.paisley = None
    self.db_name = None
    self.host = None
    self.port = None
    # name -> Notifier
    self.notifiers = dict()
    self.retry = 0
    self.reconnector = None
    # doc_id -> list of tuples (Filter, rev, deleted)
    # The list is added when we start modifying the document,
    # all the notifications received in the meantime will be
    # stored in this hash until the change is done; this solves
    # the problem caused by a change notification received
    # before the http request modifying the document is finished
    self._pending_notifications = dict()
    # doc_id -> C{int} number of locks
    self._document_locks = dict()
    self._configure(host, port, db_name)
def __init__(self, methodName='runTest'):
    """Selenium test case: routes logging to 'test.log' via FluLogKeeper."""
    log.FluLogKeeper.init('test.log')
    log.set_default(log.FluLogKeeper())
    log.Logger.__init__(self, log.get_default())
    unittest.TestCase.__init__(self, methodName)
    self.debug('SeleniumTest.__init__: finished')
def tearDown(self):
    # detach and flush the buffer keeper installed by setUp
    tee = log.get_default()
    tee.remove_keeper("test-buffer")
    self.keeper.clean()
    try:
        os.remove(run.get_pidpath(os.path.curdir, "dummy_process"))
    except OSError:
        # the pid file may legitimately not exist
        pass
    yield common.TestCase.tearDown(self)
def __init__(self):
    """Dummy contractor medium recording which messages were sent."""
    journal.DummyRecorderNode.__init__(self)
    log.LogProxy.__init__(self, log.get_default())
    log.Logger.__init__(self, self)
    # start with no message of any kind sent yet
    for attr in ('bid_sent', 'handover_sent', 'refusal_sent',
                 'defect_sent', 'report_sent', 'updated_address'):
        setattr(self, attr, None)
def __init__(self):
    """Snapshot of the global registries used by the application."""
    self.log_name = self.name
    self.module = self.__module__
    log.Logger.__init__(self, log.get_default())
    self._restorators = serialization.get_registry()
    self._agents = get_agent_registry()
    self._views = get_view_registry()
    self._initial_data = get_initial_data_registry()
    self._adapters = iadapter.AdapterRegistry()
    self._models = model.get_registry()
    self._migrations = migration.get_registry()
def __init__(self):
    """In-memory messaging emulator; starts in the connected state."""
    common.ConnectionManager.__init__(self)
    log_keeper = log.get_default() or log.FluLogKeeper()
    log.LogProxy.__init__(self, log_keeper)
    log.Logger.__init__(self, self)
    common.Statistics.__init__(self)
    # name -> queue
    self._queues = {}
    # name -> exchange
    self._exchanges = {}
    self._on_connected()
    self._enabled = True
def __init__(self, host, port, user='******', password='******',
             timeout=5):
    """AMQP connection manager.

    NOTE(review): the credential defaults look redacted ('******');
    confirm the intended defaults (RabbitMQ conventionally uses 'guest').
    """
    ConnectionManager.__init__(self)
    log.LogProxy.__init__(self, log.get_default() or log.FluLogKeeper())
    log.Logger.__init__(self, self)
    self._user = user
    self._password = password
    self._host = host
    self._port = port
    # seconds to wait before giving up on establishing the connection
    self._timeout_connecting = timeout
    self._factory = AMQFactory(self, TwistedDelegate(),
                               self._user, self._password,
                               on_connected=self._on_connected,
                               on_disconnected=self._on_disconnected)
def __init__(self, version=None, registry=None):
    """Tunneling backend speaking protocol *version* (feat.version when
    not given)."""
    common.ConnectionManager.__init__(self)
    log.LogProxy.__init__(self, log.get_default() or log.FluLogKeeper())
    log.Logger.__init__(self, self)
    ver = version if version is not None else feat.version
    self._version = int(ver)
    self._registry = registry
    self._channel = None
    self._pending_dispatches = 0
    self._route = None
    # "established connections"
    # Recipient -> route
    self._uris = dict()
def __init__(self, log_keeper=None, filename=":memory:"):
    """Thread-pool statistics backed by a sqlite storage (in-memory by
    default)."""
    log.Logger.__init__(self, log_keeper or log.get_default())
    self.storage = SqliteStorage(filename)
    # job_id -> epoch_created
    self.waiting = dict()
    # job_id -> epoch_started
    self.processing = dict()
    # number_of_threads * time_of_operation
    self._uptime_snapshot = 0
    # snapshot when the uptime was last time updated
    self._uptime_snapshot_epoch = time.time()
    self.number_of_threads = 0
    # busy time (the time the threadpool is actually doing some job)
    self.busy_time = 0
def __init__(self, methodName=' impossible-name '):
    """TestCase base compatible with several Twisted versions.

    The ' impossible-name ' sentinel marks "no methodName given" so the
    superclass default can be discovered at runtime.
    """
    log_keeper = log.get_default() or log.FluLogKeeper()
    log.LogProxy.__init__(self, log_keeper)
    log.Logger.__init__(self, self)
    # Twisted changed the TestCase.__init__ signature several
    # times.
    #
    # In versions older than 2.1.0 there was no __init__ method.
    #
    # In versions 2.1.0 up to 2.4.0 there is a __init__ method
    # with a methodName kwarg that has a default value of None.
    #
    # In version 2.5.0 the default value of the kwarg was changed
    # to "runTest".
    #
    # In versions above 2.5.0 God only knows what's the default
    # value, as we do not currently support them.
    import inspect
    if not inspect.ismethod(unittest.TestCase.__init__):
        # it's Twisted < 2.1.0
        unittest.TestCase.__init__(self)
    else:
        # it's Twisted >= 2.1.0
        if methodName == ' impossible-name ':
            # we've been called with no parameters, use the
            # default parameter value from the superclass
            defaults = inspect.getargspec(unittest.TestCase.__init__)[3]
            methodName = defaults[0]
        unittest.TestCase.__init__(self, methodName=methodName)
    self.log_name = self.id()
    # Skip slow tests if '--skip-slow' option is enabled
    if _getConfig().get('skip-slow'):
        if self.getSlow() and not self.getSkip():
            self.skip = 'slow test'
    # Handle configurable attributes
    for attr in self.configurable_attributes:
        value = util.acquireAttribute(self._parents, attr, None)
        if value is not None:
            setattr(self, attr, value)
def __init__(self, hostname, port, server_name='', log_keeper=None,
             prefix=None, interface='', apiprefix=None,
             thread_stats_file=None, **kwargs):
    """Web server wired with a thread pool and optional thread statistics."""
    self.hostname = hostname
    self._prefix = prefix
    # fall back to the hostname when no explicit server name is given
    server_name = server_name or hostname
    if log_keeper is None:
        log_keeper = log.get_default()
    if thread_stats_file:
        self.thread_stats = stats.Statistics(log_keeper, thread_stats_file)
    else:
        self.thread_stats = None
    self.threadpool = threadpool.ThreadPool(
        logger=log_keeper, init_thread=self._init_thread,
        statistics=self.thread_stats)
    self.res = Root(self, server_name, prefix=prefix, apiprefix=apiprefix)
    webserver.Server.__init__(self, port, self.res, log_keeper=log_keeper,
                              interface=interface, **kwargs)
def __exit__(self, type, value, traceback): if type is not None: raise type(value), None, traceback if self.opts.agency_daemonize: tmp = tempfile.mktemp(suffix="feat.temp.log") log.info("run", "Logging will temporarily be done to: %s", tmp) run.daemonize(stdout=tmp, stderr=tmp) # dump all the log entries logged so far to the FluLogKeeper again # the reason for this is that we want them to be included in text # file (so far they have been printed to the console) tee = log.get_default() buff = tee.get_keeper('buffer') flulog = tee.get_keeper('flulog') buff.dump(flulog) # use the resolver from twisted.names instead of the default # the reason for this is that ThreadedResolver behaves strangely # after the reconnection - raises the DNSLookupError for names # which have been resolved while there was no connection resolver.installResolver(reactor) reactor.run()
def __init__(self):
    """In-memory CouchDB emulator; starts in the connected state."""
    common.ConnectionManager.__init__(self)
    log.LogProxy.__init__(self, log.get_default() or log.FluLogKeeper())
    ChangeListener.__init__(self, self)
    common.Statistics.__init__(self)
    # id -> document
    self._documents = {}
    # id -> name -> body
    self._attachments = {}
    # id -> view_name -> (key, value)
    self._view_cache = {}
    self._on_connected()
    # type_name -> int, used for generating nice agent IDs in
    # simulations
    self._doc_type_counters = dict()
    # list of all old revisions
    self._changes = list()
def __init__(self, port_or_range, hostname=None, security_policy=None,
             log_keeper=None):
    """Listening endpoint bound to the first free port of *port_or_range*.

    @param port_or_range: a single C{int} port or an iterable of ports
    """
    log_keeper = log_keeper or log.get_default() or log.FluLogKeeper()
    log.LogProxy.__init__(self, log_keeper)
    # NOTE(review): sibling classes pass 'self' to log.Logger.__init__
    # after setting up the proxy; confirm passing log_keeper directly is
    # intended here
    log.Logger.__init__(self, log_keeper)
    if hostname is None:
        hostname = socket.gethostbyaddr(socket.gethostname())[0]
    if isinstance(port_or_range, int):
        # a single port becomes a one-element range
        port_range = [port_or_range]
    else:
        port_range = port_or_range
    self._hostname = hostname
    self._port_range = port_range
    self._scheme = None
    self._security = security.ensure_policy(security_policy)
    self._factory = None
    self._port = None
def wait_for_ex(check, timeout, freq, args=(), kwargs=None, logger=None):
    """Poll *check* every *freq* seconds until it returns a true value.

    @param check: callable (may return a Deferred) polled with *args*
        and *kwargs*
    @param timeout: seconds after which defer.TimeoutError is raised
    @param freq: polling interval in seconds
    @param logger: log.Logger used to report progress; a logger bound to
        the default keeper is created when not given
    @raise defer.TimeoutError: when *check* stays false past *timeout*
    """
    if kwargs is None:
        # avoid the shared mutable-default-argument pitfall of 'kwargs={}'
        kwargs = {}
    if logger is None:
        logger = log.Logger(log.get_default())
    assert callable(check)
    waiting = 0
    while True:
        value = yield check(*args, **kwargs)
        if value:
            logger.log('Check %r positive, continuing.', check.__name__)
            break
        logger.log('Check %r still negative, sleeping %r seconds.',
                   check.__name__, freq)
        waiting += freq
        if waiting > timeout:
            raise defer.TimeoutError('Timeout error waiting for check %r.'
                                     % check.__name__)
        d = defer.Deferred()
        call_later(freq, d.callback, None)
        yield d
def testDefaultLogging(self):
    """Each module-level log helper should forward to the default keeper
    with the right level, category, format and arguments."""
    keeper = DummyLogKeeper()
    current = log.get_default()
    log.set_default(keeper)
    # restore the previous default keeper even if the assertions fail
    self.addCleanup(log.set_default, current)
    log.log("foo", "1")
    log.debug("bar", "2", 42)
    log.info("spam", "3")
    log.warning("bacon", "4", 2, 3, 5)
    log.error("eggs", "4")
    self.assertEqual(
        keeper.entries,
        [
            (LogLevel.log, None, "foo", "1", (), 1),
            (LogLevel.debug, None, "bar", "2", (42,), 1),
            (LogLevel.info, None, "spam", "3", (), 1),
            (LogLevel.warning, None, "bacon", "4", (2, 3, 5), 1),
            (LogLevel.error, None, "eggs", "4", (), 1),
        ],
    )
def setUp(self):
    """Start (or reuse) rabbit/couch processes and compute test paths."""
    yield IntegrationTest.setUp(self)
    # buffer the log entries produced during the test
    tee = log.get_default()
    tee.add_keeper('buffer', log.LogBuffer(limit=10000))
    self.tempdir = os.path.abspath(os.path.curdir)
    self.socket_path = os.path.abspath(
        os.path.join(os.path.curdir, 'feat-test.socket'))
    # make the project's bin/ scripts reachable from spawned processes
    bin_dir = os.path.abspath(os.path.join(
        os.path.curdir, '..', '..', 'bin'))
    os.environ["PATH"] = ":".join([bin_dir, os.environ["PATH"]])
    if self.start_rabbit:
        self.start_rabbit_process()
    if self.start_couch:
        self.start_couch_process()
    if self.run_couch:
        self.db_host, self.db_port, self.db_name =\
            yield self.run_and_configure_db()
    else:
        self.db_host, self.db_name = '127.0.0.1', 'test'
        self.db_port = self.db_process.get_free_port()
    if self.run_rabbit:
        self.msg_host, self.msg_port = yield self.run_and_configure_msg()
    else:
        self.msg_host = '127.0.0.1'
        self.msg_port = self.msg_process.get_free_port()
    self.jourfile = "%s.sqlite3" % (self._testMethodName, )
    self.pid_path = os.path.abspath(
        os.path.join(os.path.curdir, 'feat.pid'))
def __init__(self, **kwargs):
    """Formatable record that also logs through the default keeper."""
    log.Logger.__init__(self, log.get_default())
    formatable.Formatable.__init__(self, **kwargs)
import time

from feat.common import log


class Benchmark(log.Logger):
    """Logs elapsed-time deltas between successive report() calls."""

    log_category = 'benchmark'

    def __init__(self, log_keeper):
        log.Logger.__init__(self, log_keeper)
        # epoch of the previous report; None until the first call
        self._last = None

    def report(self, *what):
        # NOTE(review): self._start is only assigned on the first call
        # (when self._last is None); fine while report() is the sole
        # entry point, but consider initialising it in __init__
        t = time.time()
        if self._last is None:
            self._last = t
            self._start = t
        delta = t - self._last
        self._last = t
        from_start = t - self._start
        self.info("%f %f " + what[0], from_start, delta, *what[1:])


benchmark = Benchmark(log.get_default())
def setUp(self):
    yield common.TestCase.setUp(self)
    # capture all log entries produced during the test in a buffer keeper
    tee = log.get_default()
    self.keeper = TestLogBuffer(self, limit=10000)
    tee.add_keeper("test-buffer", self.keeper)
    # path to the dummy process script exercised by the tests
    self.cmd = os.path.join(os.path.dirname(dummy_process.__file__),
                            "dummy_process.py")