def test_get_logger_should_use_default_name_when_name_not_specified(self):
    """When no name is given, get_logger falls back to the default name."""
    logger = stacklog.get_logger(None, is_parent=True)
    # assertEquals is a deprecated alias; use the canonical assertEqual.
    self.assertEqual(logger.name, stacklog.default_logger_name)
    stacklog.set_default_logger_name("default")
    logger = stacklog.get_logger(None, is_parent=True)
    self.assertEqual(logger.name, "default")
    # NOTE(review): this mutates the module-level default logger name
    # without restoring it — confirm setUp/tearDown resets the module state.
def test_get_logger_should_create_queue_logger_for_child(self):
    """A child logger gets a QueueHandler and a 'child_'-prefixed name."""
    logger_name = "logger"
    stacklog.get_logger(logger_name, is_parent=True)
    child_logger = stacklog.get_logger(logger_name, is_parent=False)
    self.assertIsInstance(child_logger.handlers[0], stacklog.QueueHandler)
    self.assertEqual(child_logger.name, "child_logger")
    # Normalized from the deprecated assertEquals alias for consistency
    # with the assertEqual call above.
    self.assertEqual(child_logger.level, logging.DEBUG)
def setup_mock_log(self, name=None):
    """Record a get_logger expectation that yields the shared mock log.

    When no name is supplied, any logger name is accepted via
    mox.IgnoreArg().
    """
    expected_name = mox.IgnoreArg() if name is None else name
    stacklog.get_logger(name=expected_name,
                        is_parent=False).AndReturn(self.log)
def test_verify_for_range_with_callback(self):
    # End-to-end expectation script for verify_for_range: two status
    # sweeps (SENT_UNVERIFIED then PENDING), per-owner batches queued to
    # the worker pool, and each exist record saved with a new status.
    mock_logger = self._setup_mock_logger()
    self.mox.StubOutWithMock(mock_logger, "info")
    stacklog.get_logger("verifier", is_parent=False).AndReturn(mock_logger)
    # One progress line per sweep: 0 groups found, then 2 groups found.
    mock_logger.info("glance: Adding 0 per-owner exists to queue.")
    mock_logger.info("glance: Adding 2 per-owner exists to queue.")
    callback = self.mox.CreateMockAnything()
    when_max = datetime.datetime.utcnow()
    # Pin the status constants read by the code under test.
    models.ImageExists.SENT_VERIFYING = "sent_verifying"
    models.ImageExists.SENT_UNVERIFIED = "sent_unverified"
    models.ImageExists.PENDING = "pending"
    models.ImageExists.VERIFYING = "verifying"
    exist1 = self.mox.CreateMockAnything()
    exist2 = self.mox.CreateMockAnything()
    exist3 = self.mox.CreateMockAnything()
    results = {"owner1": [exist1, exist2], "owner2": [exist3]}
    models.ImageExists.find_and_group_by_owner_and_raw_id(
        ending_max=when_max, status=models.ImageExists.SENT_UNVERIFIED
    ).AndReturn([])
    models.ImageExists.find_and_group_by_owner_and_raw_id(
        ending_max=when_max, status=models.ImageExists.PENDING
    ).AndReturn(results)
    exist1.save()
    exist2.save()
    exist3.save()
    # Dict iteration order is not guaranteed, so accept the two
    # per-owner pool submissions in any order.
    self.pool.apply_async(glance_verifier._verify, args=([exist3],),
                          callback=callback).InAnyOrder()
    self.pool.apply_async(glance_verifier._verify, args=([exist1, exist2],),
                          callback=callback).InAnyOrder()
    self.mox.ReplayAll()
    self.glance_verifier.verify_for_range(when_max, callback=callback)
    self.assertEqual(exist1.status, "verifying")
    self.assertEqual(exist2.status, "verifying")
    self.assertEqual(exist3.status, "verifying")
    self.mox.VerifyAll()
def test_verify_for_range_with_callback(self):
    # Expectation script for verify_for_range: two status sweeps
    # (SENT_UNVERIFIED then PENDING), one pool submission per owner
    # group, and every exist record saved with status 'verifying'.
    mock_logger = self._setup_mock_logger()
    self.mox.StubOutWithMock(mock_logger, 'info')
    stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
    mock_logger.info('glance: Adding 0 per-owner exists to queue.')
    mock_logger.info('glance: Adding 2 per-owner exists to queue.')
    callback = self.mox.CreateMockAnything()
    when_max = datetime.datetime.utcnow()
    # Pin the status constants read by the code under test.
    models.ImageExists.SENT_VERIFYING = 'sent_verifying'
    models.ImageExists.SENT_UNVERIFIED = 'sent_unverified'
    models.ImageExists.PENDING = 'pending'
    models.ImageExists.VERIFYING = 'verifying'
    exist1 = self.mox.CreateMockAnything()
    exist2 = self.mox.CreateMockAnything()
    exist3 = self.mox.CreateMockAnything()
    results = {'owner1': [exist1, exist2], 'owner2': [exist3]}
    models.ImageExists.find_and_group_by_owner_and_raw_id(
        ending_max=when_max,
        status=models.ImageExists.SENT_UNVERIFIED).AndReturn([])
    models.ImageExists.find_and_group_by_owner_and_raw_id(
        ending_max=when_max,
        status=models.ImageExists.PENDING).AndReturn(results)
    exist1.save()
    exist2.save()
    exist3.save()
    # Record one pool submission per owner group, in dict iteration order.
    for value in results.values():
        self.pool.apply_async(glance_verifier._verify, args=(value,),
                              callback=callback)
    self.mox.ReplayAll()
    self.glance_verifier.verify_for_range(
        when_max, callback=callback)
    self.assertEqual(exist1.status, 'verifying')
    self.assertEqual(exist2.status, 'verifying')
    self.assertEqual(exist3.status, 'verifying')
    self.mox.VerifyAll()
def test_run_no_notifications(self):
    """run() with notifications disabled simply delegates to _run()."""
    logger = self._create_mock_logger()
    stacklog.get_logger('verifier', is_parent=False).AndReturn(logger)
    verifier = self.verifier_without_notifications
    self.mox.StubOutWithMock(verifier, '_run')
    verifier._run()
    self.mox.ReplayAll()
    verifier.run()
    self.mox.VerifyAll()
def test_run_notifications_with_routing_keys(self):
    """run() with notifications wires the exchange and passes a callback."""
    logger = self._create_mock_logger()
    stacklog.get_logger('verifier', is_parent=False).AndReturn(logger)
    verifier = self.verifier_with_notifications
    self._mock_exchange_create_and_connect(verifier)
    self.mox.StubOutWithMock(verifier, '_run')
    verifier._run(callback=mox.Not(mox.Is(None)))
    self.mox.ReplayAll()
    verifier.run()
    self.mox.VerifyAll()
def test_get_logger_should_create_timed_rotating_logger_for_parent(self):
    """Parent loggers rotate at midnight, keep 6 backups, log at DEBUG."""
    logger_name = "logger"
    logger = stacklog.get_logger(logger_name, is_parent=True)
    handler = logger.handlers[0]
    self.assertIsInstance(handler,
                          logging.handlers.TimedRotatingFileHandler)
    # assertEquals is a deprecated alias; use the canonical assertEqual.
    self.assertEqual(handler.when, "MIDNIGHT")
    self.assertEqual(handler.interval, 86400)  # one day, in seconds
    self.assertEqual(handler.backupCount, 6)
    self.assertEqual(logger.name, "logger")
    self.assertEqual(logger.level, logging.DEBUG)
def test_run_full(self):
    # Full pass through run(): one loop iteration that opens a commit
    # context, verifies a settled time range, drains two async results,
    # reconciles failures, sleeps one tick, then stops.
    mock_logger = self._create_mock_logger()
    mock_logger.info('exchange: N: None, P: 0, S: 2, E: 0')
    stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
    stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
    # The run loop wraps its work in a Django transaction context manager.
    self.mox.StubOutWithMock(transaction, 'commit_on_success')
    tran = self.mox.CreateMockAnything()
    tran.__enter__().AndReturn(tran)
    tran.__exit__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
    transaction.commit_on_success().AndReturn(tran)
    self._mock_exchange_create_and_connect(self.verifier_with_notifications)
    self.verifier_with_notifications.exchange().AndReturn('exchange')
    self.mox.StubOutWithMock(self.verifier_with_notifications,
                             '_keep_running')
    self.verifier_with_notifications._keep_running().AndReturn(True)
    start = datetime.datetime.utcnow()
    self.mox.StubOutWithMock(self.verifier_with_notifications, '_utcnow')
    self.verifier_with_notifications._utcnow().AndReturn(start)
    self.verifier_with_notifications._utcnow().AndReturn(start)
    # verify_for_range is expected to get 'now' minus the settle window.
    settle_offset = {SETTLE_UNITS: SETTLE_TIME}
    ending_max = start - datetime.timedelta(**settle_offset)
    self.mox.StubOutWithMock(self.verifier_with_notifications,
                             'verify_for_range')
    self.verifier_with_notifications.verify_for_range(
        ending_max, callback=mox.Not(mox.Is(None)))
    self.mox.StubOutWithMock(self.verifier_with_notifications,
                             'reconcile_failed')
    result1 = self.mox.CreateMockAnything()
    result2 = self.mox.CreateMockAnything()
    self.verifier_with_notifications.results = [result1, result2]
    # Both async results report completed, successful verifications.
    result1.ready().AndReturn(True)
    result1.successful().AndReturn(True)
    result1.get().AndReturn((True, None))
    result2.ready().AndReturn(True)
    result2.successful().AndReturn(True)
    result2.get().AndReturn((True, None))
    self.verifier_with_notifications.reconcile_failed()
    self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
    time.sleep(TICK_TIME)
    # The second _keep_running check terminates the loop.
    self.verifier_with_notifications._keep_running().AndReturn(False)
    self.mox.ReplayAll()
    self.verifier_with_notifications.run()
    self.mox.VerifyAll()
def _get_parent_logger():
    """Return the parent logger for the worker process."""
    parent = stacklog.get_logger('worker', is_parent=True)
    return parent
def _get_child_logger():
    """Return the child logger for the worker process."""
    child = stacklog.get_logger('worker', is_parent=False)
    return child
def test_get_logger_should_return_existing_parent_logger_if_present(self):
    """Repeated parent lookups for one name yield the same logger object."""
    first = stacklog.get_logger("logger", is_parent=True)
    second = stacklog.get_logger("logger", is_parent=True)
    self.assertIs(first, second)
# Allow running from a source checkout: if the repository root (the
# directory containing the 'stacktach' package) sits two levels above
# this script, put it first on sys.path so local modules win.
POSSIBLE_TOPDIR = os.path.normpath(
    os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
    sys.path.insert(0, POSSIBLE_TOPDIR)

from verifier import base_verifier
from stacktach import models
from stacktach import datetime_to_decimal as dt
from verifier import FieldMismatch
from verifier import AmbiguousResults
from verifier import NotFound
from verifier import VerificationException
from stacktach import stacklog, message_service

# Module-level logger for the glance verifier.
LOG = stacklog.get_logger('verifier')


def _verify_field_mismatch(exists, launch):
    # Compare the usage 'exists' record against the matching launch
    # record, raising FieldMismatch on the first field that disagrees.
    if not base_verifier._verify_date_field(
            launch.launched_at, exists.launched_at, same_second=True):
        raise FieldMismatch('launched_at', exists.launched_at,
                            launch.launched_at)
    if launch.instance_type_id != exists.instance_type_id:
        raise FieldMismatch('instance_type_id', exists.instance_type_id,
                            launch.instance_type_id)
    if launch.tenant != exists.tenant:
        raise FieldMismatch('tenant', exists.tenant, launch.tenant)
# Prefer the fastest available JSON implementation: ujson, then
# simplejson, then the stdlib json module.
try:
    import ujson as json
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        import json

from pympler.process import ProcessMemoryInfo

from stacktach import db
from stacktach import stacklog
from stacktach import views

stacklog.set_default_logger_name('worker')
LOG = stacklog.get_logger()


class Consumer(kombu.mixins.ConsumerMixin):
    # Kombu consumer bound to one deployment's notification queues.
    # NOTE(review): class body continues beyond this excerpt; the
    # 'exchange' and 'queue_name_prefix' parameters are presumably stored
    # there — confirm against the full file.
    def __init__(self, name, connection, deployment, durable,
                 queue_arguments, exchange, topics, queue_name_prefix):
        self.connection = connection
        self.deployment = deployment
        self.durable = durable
        self.queue_arguments = queue_arguments
        self.name = name
        # Bookkeeping fields for processing-rate reporting.
        self.last_time = None
        self.pmi = None
        self.processed = 0
        self.total_processed = 0
        self.topics = topics
def test_get_logger_should_return_existing_child_logger_if_present(self):
    """Repeated child lookups for one name yield the same logger object."""
    stacklog.get_logger("logger", is_parent=True)
    first = stacklog.get_logger("logger", is_parent=False)
    second = stacklog.get_logger("logger", is_parent=False)
    self.assertIs(first, second)
# Point Django at the test settings and blank out DB configuration so
# the test run never touches a real database.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
os.environ['STACKTACH_DB_ENGINE'] = ''
os.environ['STACKTACH_DB_NAME'] = ''
os.environ['STACKTACH_DB_HOST'] = ''
os.environ['STACKTACH_DB_USERNAME'] = ''
os.environ['STACKTACH_DB_PASSWORD'] = ''
os.environ['STACKTACH_INSTALL_DIR'] = ''
setup_sys_path()
setup_environment()

from stacktach import stacklog

# Write all test-time logs under /tmp.
stacklog.set_default_logger_location("/tmp/%s.log")
default_logger = stacklog.get_logger()
worker_logger = stacklog.get_logger('worker')
verifier_logger = stacklog.get_logger('verifier')


class _AssertRaisesContext(object):
    """A context manager used to implement TestCase.assertRaises* methods."""

    # NOTE(review): class body continues beyond this excerpt (e.g. the
    # matching __exit__ is not visible here).
    def __init__(self, expected, test_case, expected_regexp=None):
        # expected: the exception class the managed block should raise.
        self.expected = expected
        self.failureException = test_case.failureException
        self.expected_regexp = expected_regexp

    def __enter__(self):
        return self
def test_get_logger_should_return_existing_child_logger_if_present(self):
    """Fetching a child logger twice returns one shared instance."""
    stacklog.get_logger('logger', is_parent=True)
    child_a = stacklog.get_logger('logger', is_parent=False)
    child_b = stacklog.get_logger('logger', is_parent=False)
    self.assertIs(child_a, child_b)
def test_get_logger_should_return_existing_parent_logger_if_present(self):
    """Fetching a parent logger twice returns one shared instance."""
    parent_a = stacklog.get_logger('logger', is_parent=True)
    parent_b = stacklog.get_logger('logger', is_parent=True)
    self.assertIs(parent_a, parent_b)
def test_get_logger_raise_exception_when_child_created_before_parent(self):
    """Requesting a child logger before its parent exists is an error."""
    self.assertRaises(stacklog.ParentLoggerDoesNotExist,
                      stacklog.get_logger, 'logger', is_parent=False)
    help="Use query to match UMS, "
         "period length of 'day' required.",
    action='store_true')
parser.add_argument(
    '--ums-offset',
    help="UMS' fencepost offset in seconds. Default: 4 days",
    type=int, default=DEFAULT_UMS_OFFSET)
args = parser.parse_args()
# UMS-matching queries only make sense for day-aligned periods.
if args.ums and args.period_length != 'day':
    print "UMS query can only be used with period_length of 'day'."
    sys.exit(0)
# Parent logger plus a background listener that drains queued records.
stacklog.set_default_logger_name('nova_usage_audit')
parent_logger = stacklog.get_logger('nova_usage_audit', is_parent=True)
log_listener = stacklog.LogListener(parent_logger)
log_listener.start()
if args.reconcile:
    with open(args.reconciler_config) as f:
        reconciler_config = json.load(f)
    reconciler = Reconciler(reconciler_config)
# End time defaults to 'now' unless overridden on the command line.
# NOTE(review): 'time' here shadows any imported time module — confirm.
if args.utcdatetime is not None:
    time = args.utcdatetime
else:
    time = datetime.datetime.utcnow()
start, end = usage_audit.get_previous_period(time, args.period_length)
from django.conf.urls import patterns, url

from stacktach import stacklog

# Configure web logging and start its listener before the URL patterns
# are built.
stacklog.set_default_logger_name('stacktach-web')
web_logger = stacklog.get_logger('stacktach-web')
web_logger_listener = stacklog.LogListener(web_logger)
web_logger_listener.start()

# NOTE(review): the patterns(...) call continues beyond this excerpt;
# it is closed later in the file.
urlpatterns = patterns(
    '',
    url(r'^$', 'stacktach.views.welcome', name='welcome'),
    url(r'stacky/deployments/$', 'stacktach.stacky_server.do_deployments'),
    url(r'stacky/events/$', 'stacktach.stacky_server.do_events'),
    url(r'stacky/hosts/$', 'stacktach.stacky_server.do_hosts'),
    url(r'stacky/uuid/$', 'stacktach.stacky_server.do_uuid'),
    url(r'stacky/timings/$', 'stacktach.stacky_server.do_timings'),
    url(r'stacky/timings/uuid/$', 'stacktach.stacky_server.do_timings_uuid'),
    url(r'stacky/summary/$', 'stacktach.stacky_server.do_summary'),
    url(r'stacky/request/$', 'stacktach.stacky_server.do_request'),
    url(r'stacky/reports/$', 'stacktach.stacky_server.do_jsonreports'),
    url(r'stacky/report/(?P<report_id>\d+)/$',
        'stacktach.stacky_server.do_jsonreport'),
    url(r'stacky/show/(?P<event_id>\d+)/$',
        'stacktach.stacky_server.do_show'),
    url(r'stacky/watch/(?P<deployment_id>\d+)/$',
        'stacktach.stacky_server.do_watch'),
    url(r'stacky/search/$', 'stacktach.stacky_server.search'),
    url(r'stacky/kpi/$', 'stacktach.stacky_server.do_kpi'),
    url(r'stacky/kpi/(?P<tenant_id>\w+)/$', 'stacktach.stacky_server.do_kpi'),
    url(r'stacky/usage/launches/$',
        'stacktach.stacky_server.do_list_usage_launches'),
from django.conf.urls import patterns, url

from stacktach import stacklog

# Configure web logging and start its listener before the URL patterns
# are built.
stacklog.set_default_logger_name('stacktach-web')
web_logger = stacklog.get_logger('stacktach-web')
web_logger_listener = stacklog.LogListener(web_logger)
web_logger_listener.start()

# UI-facing views, keyed by deployment id where applicable.
web_urls = (
    url(r'^$', 'stacktach.views.welcome', name='welcome'),
    url(r'^(?P<deployment_id>\d+)/$', 'stacktach.views.home', name='home'),
    url(r'^(?P<deployment_id>\d+)/details/(?P<column>\w+)/(?P<row_id>\d+)/$',
        'stacktach.views.details', name='details'),
    url(r'^(?P<deployment_id>\d+)/search/$',
        'stacktach.views.search', name='search'),
    url(r'^(?P<deployment_id>\d+)/expand/(?P<row_id>\d+)/$',
        'stacktach.views.expand', name='expand'),
    url(r'^(?P<deployment_id>\d+)/latest_raw/$',
        'stacktach.views.latest_raw', name='latest_raw'),
    url(r'^(?P<deployment_id>\d+)/instance_status/$',
        'stacktach.views.instance_status', name='instance_status'),
)

# stacky CLI endpoints.
# NOTE(review): this tuple continues beyond this excerpt; it is closed
# later in the file.
stacky_urls = (
    url(r'stacky/deployments/$', 'stacktach.stacky_server.do_deployments'),
def _setup_mock_logger(self):
    """Stub stacklog.get_logger('worker', ...) to hand back a mock logger."""
    fake_logger = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(stacklog, 'get_logger')
    stacklog.get_logger('worker', is_parent=False).AndReturn(fake_logger)
    return fake_logger
def _get_parent_logger():
    """Return the parent logger for the verifier process."""
    parent = stacklog.get_logger('verifier', is_parent=True)
    return parent
parser.add_argument('--period_length',
                    choices=['hour', 'day'], default='day')
parser.add_argument('--utcdatetime',
                    help="Override the end time used to generate report.",
                    type=valid_datetime, default=None)
parser.add_argument('--store',
                    help="If set to true, report will be stored. "
                         "Otherwise, it will just be printed",
                    default=False, action="store_true")
args = parser.parse_args()

# Route this audit's logging through its own parent logger and drain
# queued records in a background listener.
stacklog.set_default_logger_name('instance_hours')
parent_logger = stacklog.get_logger('instance_hours', is_parent=True)
log_listener = stacklog.LogListener(parent_logger)
log_listener.start()

tenant_manager = TenantManager()
report = InstanceHoursReport(tenant_manager,
                             time=args.utcdatetime,
                             period_length=args.period_length)
report.compile_hours()
# NOTE(review): 'json' here holds the serialized report text and shadows
# any imported json module from this point on — confirm intended.
json = report.generate_json()
if not args.store:
    # Python 2 print statement.
    print json
else:
    report.store(json)
def log_warn(msg):
    """Emit a warning through the lazily-initialized module logger."""
    global LOG
    logger = LOG
    if logger is None:
        # First use: resolve the default logger and cache it.
        logger = stacklog.get_logger()
        LOG = logger
    if logger is not None:
        logger.warn(msg)
def _get_child_logger():
    """Return the child logger for the verifier process."""
    child = stacklog.get_logger('verifier', is_parent=False)
    return child
def setup_mock_log(self, name=None):
    """Record a get_logger expectation returning the shared mock log.

    With no name supplied, any logger name is matched via mox.IgnoreArg().
    """
    expected = mox.IgnoreArg() if name is None else name
    stacklog.get_logger(name=expected).AndReturn(self.log)
def _setup_mock_logger(self):
    """Stub stacklog.get_logger so the verifier receives a mock logger."""
    fake = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(stacklog, "get_logger")
    stacklog.get_logger("verifier", is_parent=False).AndReturn(fake)
    return fake
def test_get_logger_raise_exception_when_child_created_before_parent(self):
    """A child logger cannot be created until its parent exists."""
    self.assertRaises(stacklog.ParentLoggerDoesNotExist,
                      stacklog.get_logger, "logger", is_parent=False)
def _setup_mock_logger(self):
    """Replace stacklog.get_logger with a stub that yields a mock logger."""
    self.mox.StubOutWithMock(stacklog, 'get_logger')
    mocked = self.mox.CreateMockAnything()
    stacklog.get_logger('verifier', is_parent=False).AndReturn(mocked)
    return mocked
def _get_child_logger():
    """Return the child logger for the verifier process."""
    child = stacklog.get_logger("verifier", is_parent=False)
    return child
def test_get_logger_should_get_exchange_logger_if_exchange_provided(self):
    """Passing an exchange name yields an ExchangeLogger."""
    filename = 'filename'
    logger = stacklog.get_logger(filename, 'nova')
    self.assertIsInstance(logger, ExchangeLogger)
    # Clean up log files created as a side effect. The loop variable was
    # renamed from 'file', which shadowed the builtin.
    for log_file in glob.glob('{0}.log*'.format(filename)):
        os.remove(log_file)
def test_get_logger_should_get_default_logger_if_exchange_not_provided(self):
    """Without an exchange, get_logger returns a plain logging.Logger."""
    filename = 'default_logger'
    logger = stacklog.get_logger(filename)
    self.assertIsInstance(logger, logging.Logger)
    # Clean up log files created as a side effect. The loop variable was
    # renamed from 'file', which shadowed the builtin.
    for log_file in glob.glob('{0}.log*'.format(filename)):
        os.remove(log_file)
parser.add_argument('--ums',
                    help="Use query to match UMS, "
                         "period length of 'day' required.",
                    action='store_true')
parser.add_argument('--ums-offset',
                    help="UMS' fencepost offset in seconds. Default: 4 days",
                    type=int, default=DEFAULT_UMS_OFFSET)
args = parser.parse_args()
# UMS-matching queries only make sense for day-aligned periods.
if args.ums and args.period_length != 'day':
    print "UMS query can only be used with period_length of 'day'."
    sys.exit(0)
# Parent logger plus a background listener that drains queued records.
stacklog.set_default_logger_name('nova_usage_audit')
parent_logger = stacklog.get_logger('nova_usage_audit', is_parent=True)
log_listener = stacklog.LogListener(parent_logger)
log_listener.start()
if args.reconcile:
    with open(args.reconciler_config) as f:
        reconciler_config = json.load(f)
    reconciler = Reconciler(reconciler_config)
# End time defaults to 'now' unless overridden on the command line.
# NOTE(review): 'time' here shadows any imported time module — confirm.
if args.utcdatetime is not None:
    time = args.utcdatetime
else:
    time = datetime.datetime.utcnow()
start, end = usage_audit.get_previous_period(time, args.period_length)
# Prefer the fastest available JSON implementation: ujson, then
# simplejson, then the stdlib json module.
try:
    import ujson as json
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        import json

from pympler.process import ProcessMemoryInfo

from stacktach import db
from stacktach import stacklog
from stacktach import views

stacklog.set_default_logger_name('worker')
LOG = stacklog.get_logger()


class NovaConsumer(kombu.mixins.ConsumerMixin):
    # Kombu consumer for nova notifications on a single deployment.
    def __init__(self, name, connection, deployment, durable,
                 queue_arguments):
        self.connection = connection
        self.deployment = deployment
        self.durable = durable
        self.queue_arguments = queue_arguments
        self.name = name
        # Bookkeeping fields for processing-rate reporting.
        self.last_time = None
        self.pmi = None
        self.processed = 0
        self.total_processed = 0

    # NOTE(review): 'type' parameter shadows the builtin; kept as-is for
    # caller compatibility. The method body continues beyond this excerpt.
    def _create_exchange(self, name, type, exclusive=False,
                         auto_delete=False):
# Point Django at the test settings and blank out DB configuration so
# the test run never touches a real database.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
os.environ['STACKTACH_DB_ENGINE'] = ''
os.environ['STACKTACH_DB_NAME'] = ''
os.environ['STACKTACH_DB_HOST'] = ''
os.environ['STACKTACH_DB_USERNAME'] = ''
os.environ['STACKTACH_DB_PASSWORD'] = ''
os.environ['STACKTACH_INSTALL_DIR'] = ''
setup_sys_path()
setup_environment()

from stacktach import stacklog

# Write all test-time logs under /tmp.
stacklog.set_default_logger_location("/tmp/%s.log")
default_logger = stacklog.get_logger()
worker_logger = stacklog.get_logger('worker')
verifier_logger = stacklog.get_logger('verifier')


class _AssertRaisesContext(object):
    """A context manager used to implement TestCase.assertRaises* methods."""

    def __init__(self, expected, test_case, expected_regexp=None):
        # expected: the exception class the managed block should raise.
        self.expected = expected
        self.failureException = test_case.failureException
        self.expected_regexp = expected_regexp

    def __enter__(self):
        return self

    # NOTE(review): the __exit__ body continues beyond this excerpt.
    def __exit__(self, exc_type, exc_value, tb):