예제 #1
0
    def test_get_logger_should_use_default_name_when_name_not_specified(self):
        """get_logger(None) must fall back to the module-level default name.

        Also verifies that set_default_logger_name() changes that fallback.
        """
        logger = stacklog.get_logger(None, is_parent=True)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use the canonical assertEqual.
        self.assertEqual(logger.name, stacklog.default_logger_name)

        stacklog.set_default_logger_name('default')
        logger = stacklog.get_logger(None, is_parent=True)
        self.assertEqual(logger.name, 'default')
예제 #2
0
    def test_get_logger_should_use_default_name_when_name_not_specified(self):
        """get_logger(None) must fall back to the module-level default name.

        Also verifies that set_default_logger_name() changes that fallback.
        """
        logger = stacklog.get_logger(None, is_parent=True)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use the canonical assertEqual.
        self.assertEqual(logger.name, stacklog.default_logger_name)

        stacklog.set_default_logger_name("default")
        logger = stacklog.get_logger(None, is_parent=True)
        self.assertEqual(logger.name, "default")
예제 #3
0
    import ujson as json
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        import json

from pympler.process import ProcessMemoryInfo

from django.db import connection as db_connection
from stacktach import db
from stacktach import message_service
from stacktach import stacklog
from stacktach import views

# Route stacklog's default logger to the 'worker' log at import time.
stacklog.set_default_logger_name('worker')
# Flag read by the consumer loop to request a graceful shutdown.
shutdown_soon = False


def _get_child_logger():
    """Return the non-parent ('child') stacklog logger named 'worker'."""
    child = stacklog.get_logger('worker', is_parent=False)
    return child


class Consumer(kombu.mixins.ConsumerMixin):
    def __init__(self, name, connection, deployment, durable, queue_arguments,
                 exchange, topics, connect_max_retries=10):
        self.connect_max_retries = connect_max_retries
        self.retry_attempts = 0
        self.connection = connection
        self.deployment = deployment
        self.durable = durable
예제 #4
0
# Locate the probable repository top directory: two levels up from the
# script being executed (sys.argv[0]).
POSSIBLE_TOPDIR = os.path.normpath(
    os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir))
# If a 'stacktach' package lives there, put that checkout first on
# sys.path so it is preferred over any installed copy.
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
    sys.path.insert(0, POSSIBLE_TOPDIR)

from stacktach import db, stacklog
from stacktach import version
from django.db import close_connection

import worker.worker as worker
from worker import config

# Registry of worker processes; empty at startup.
processes = {}
# Handle for the log listener; None until assigned.
log_listener = None
# Route stacklog's default logger to the 'worker' log at import time.
stacklog.set_default_logger_name('worker')

# Seconds to wait on a process before treating it as timed out.
DEFAULT_PROC_TIMEOUT = 600
# Main-loop flag; presumably cleared to stop the supervisor loop -- TODO confirm.
RUNNING = True


def _get_parent_logger():
    """Fetch the parent stacklog logger named 'worker'."""
    parent = stacklog.get_logger('worker', is_parent=True)
    return parent


def create_proc_table(manager):
    for deployment in config.deployments():
        if deployment.get('enabled', True):
            name = deployment['name']
            db_deployment, new = db.get_or_create_deployment(name)
            for exchange in deployment.get('topics').keys():
예제 #5
0
    os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir))
# Prefer the source checkout's 'stacktach' package over any installed copy.
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
    sys.path.insert(0, POSSIBLE_TOPDIR)

from stacktach import models
from verifier import FieldMismatch
from verifier import VerificationException
from verifier import base_verifier
from verifier import NullFieldException
from verifier import NotFound
from stacktach import datetime_to_decimal as dt
from stacktach import stacklog
from stacktach import message_service
import datetime

# Route stacklog's default logger to the 'verifier' log at import time.
stacklog.set_default_logger_name('verifier')


def _get_child_logger():
    """Return the child (non-parent) 'verifier' logger from stacklog."""
    logger = stacklog.get_logger('verifier', is_parent=False)
    return logger


def _verify_field_mismatch(exists, usage):
    """Compare fields of an exists record against a usage record.

    Raises FieldMismatch naming the offending field on the first
    discrepancy found.
    """
    # Dates are compared with same_second=True, so sub-second drift
    # between the two records is tolerated.
    if not base_verifier._verify_date_field(
            usage.created_at, exists.created_at, same_second=True):
        raise FieldMismatch('created_at', exists.created_at, usage.created_at,
                            exists.uuid)

    # Owner must match exactly.
    if usage.owner != exists.owner:
        raise FieldMismatch('owner', exists.owner, usage.owner, exists.uuid)
예제 #6
0
# Prefer the source checkout's 'stacktach' package over any installed copy.
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
    sys.path.insert(0, POSSIBLE_TOPDIR)

from verifier import base_verifier
from verifier import config
from verifier import NullFieldException
from stacktach import models
from stacktach import stacklog
from stacktach import datetime_to_decimal as dt
from verifier import FieldMismatch
from verifier import AmbiguousResults
from verifier import NotFound
from verifier import VerificationException
from stacktach import message_service

# Route stacklog's default logger to the 'verifier' log at import time.
stacklog.set_default_logger_name('verifier')


def _get_child_logger():
    """Obtain the non-parent stacklog logger for the verifier."""
    verifier_log = stacklog.get_logger('verifier', is_parent=False)
    return verifier_log


def _verify_field_mismatch(exists, launch):
    flavor_field_name = config.flavor_field_name()
    if not base_verifier._verify_date_field(
            launch.launched_at, exists.launched_at, same_second=True):
        raise FieldMismatch(
            'launched_at',
            {'name': 'exists', 'value': exists.launched_at},
            {'name': 'launches', 'value': launch.launched_at},
            exists.instance)
예제 #7
0
                        default='/etc/stacktach/reconciler-config.json')
    parser.add_argument('--ums',
                        help="Use query to match UMS, "
                             "period length of 'day' required.",
                        action='store_true')
    parser.add_argument('--ums-offset',
                        help="UMS' fencepost offset in seconds. Default: 4 days",
                        type=int,
                        default=DEFAULT_UMS_OFFSET)
    args = parser.parse_args()

    if args.ums and args.period_length != 'day':
        print "UMS query can only be used with period_length of 'day'."
        sys.exit(0)

    stacklog.set_default_logger_name('nova_usage_audit')
    parent_logger = stacklog.get_logger('nova_usage_audit', is_parent=True)
    log_listener = stacklog.LogListener(parent_logger)
    log_listener.start()

    if args.reconcile:
        with open(args.reconciler_config) as f:
            reconciler_config = json.load(f)
            reconciler = Reconciler(reconciler_config)

    if args.utcdatetime is not None:
        time = args.utcdatetime
    else:
        time = datetime.datetime.utcnow()

    start, end = usage_audit.get_previous_period(time, args.period_length)
예제 #8
0
파일: urls.py 프로젝트: Priyaag/stacktach
from django.conf.urls import patterns, url

from stacktach import stacklog

# Configure logging for the web process: set the default logger name,
# obtain the logger, and start a LogListener for it.
stacklog.set_default_logger_name('stacktach-web')
web_logger = stacklog.get_logger('stacktach-web')
web_logger_listener = stacklog.LogListener(web_logger)
# NOTE(review): starting the listener is an import-time side effect of
# this urls module -- confirm that is intended for every importer.
web_logger_listener.start()

urlpatterns = patterns('',
    url(r'^$', 'stacktach.views.welcome', name='welcome'),
    url(r'stacky/deployments/$', 'stacktach.stacky_server.do_deployments'),
    url(r'stacky/events/$', 'stacktach.stacky_server.do_events'),
    url(r'stacky/hosts/$', 'stacktach.stacky_server.do_hosts'),
    url(r'stacky/uuid/$', 'stacktach.stacky_server.do_uuid'),
    url(r'stacky/timings/$', 'stacktach.stacky_server.do_timings'),
    url(r'stacky/timings/uuid/$', 'stacktach.stacky_server.do_timings_uuid'),
    url(r'stacky/summary/$', 'stacktach.stacky_server.do_summary'),
    url(r'stacky/request/$', 'stacktach.stacky_server.do_request'),
    url(r'stacky/reports/$', 'stacktach.stacky_server.do_jsonreports'),
    url(r'stacky/report/(?P<report_id>\d+)/$',
                            'stacktach.stacky_server.do_jsonreport'),
    url(r'stacky/show/(?P<event_id>\d+)/$',
                                        'stacktach.stacky_server.do_show'),
    url(r'stacky/watch/(?P<deployment_id>\d+)/$',
                                        'stacktach.stacky_server.do_watch'),
    url(r'stacky/search/$', 'stacktach.stacky_server.search'),
    url(r'stacky/kpi/$', 'stacktach.stacky_server.do_kpi'),
    url(r'stacky/kpi/(?P<tenant_id>\w+)/$', 'stacktach.stacky_server.do_kpi'),
    url(r'stacky/usage/launches/$',
        'stacktach.stacky_server.do_list_usage_launches'),
예제 #9
0
파일: urls.py 프로젝트: macdomat/stacktach
from django.conf.urls import patterns, url

from stacktach import stacklog

# Configure logging for the web process: set the default logger name,
# obtain the logger, and start a LogListener for it.
stacklog.set_default_logger_name('stacktach-web')
web_logger = stacklog.get_logger('stacktach-web')
web_logger_listener = stacklog.LogListener(web_logger)
# NOTE(review): starting the listener is an import-time side effect of
# this urls module -- confirm that is intended for every importer.
web_logger_listener.start()

# url() entries for the browser-facing stacktach views (welcome, per-
# deployment home, details, search, expand, latest raw, instance status).
web_urls = (
    url(r'^$', 'stacktach.views.welcome', name='welcome'),
    url(r'^(?P<deployment_id>\d+)/$', 'stacktach.views.home', name='home'),
    url(r'^(?P<deployment_id>\d+)/details/(?P<column>\w+)/(?P<row_id>\d+)/$',
        'stacktach.views.details',
        name='details'),
    url(r'^(?P<deployment_id>\d+)/search/$',
        'stacktach.views.search',
        name='search'),
    url(r'^(?P<deployment_id>\d+)/expand/(?P<row_id>\d+)/$',
        'stacktach.views.expand',
        name='expand'),
    url(r'^(?P<deployment_id>\d+)/latest_raw/$',
        'stacktach.views.latest_raw',
        name='latest_raw'),
    url(r'^(?P<deployment_id>\d+)/instance_status/$',
        'stacktach.views.instance_status',
        name='instance_status'),
)

stacky_urls = (
    url(r'stacky/deployments/$', 'stacktach.stacky_server.do_deployments'),
예제 #10
0
# Locate the probable repository top directory: two levels up from the
# script being executed (sys.argv[0]).
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir))
# Prefer the source checkout's 'stacktach' package over any installed copy.
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, "stacktach")):
    sys.path.insert(0, POSSIBLE_TOPDIR)

from stacktach import models
from verifier import FieldMismatch
from verifier import VerificationException
from verifier import base_verifier
from verifier import NullFieldException
from verifier import NotFound
from stacktach import datetime_to_decimal as dt
from stacktach import stacklog
from stacktach import message_service
import datetime

# Route stacklog's default logger to the 'verifier' log at import time.
stacklog.set_default_logger_name("verifier")


def _get_child_logger():
    """Look up and return the non-parent stacklog logger named "verifier"."""
    log = stacklog.get_logger("verifier", is_parent=False)
    return log


def _verify_field_mismatch(exists, usage):
    """Compare fields of an exists record against a usage record.

    Raises FieldMismatch, carrying named value pairs for both records,
    on the first discrepancy found.
    """
    # Dates are compared with same_second=True, so sub-second drift
    # between the two records is tolerated.
    if not base_verifier._verify_date_field(usage.created_at, exists.created_at, same_second=True):
        raise FieldMismatch(
            "created_at",
            {"name": "exists", "value": exists.created_at},
            {"name": "launches", "value": usage.created_at},
            exists.uuid,
        )
예제 #11
0
    parser = argparse.ArgumentParser('StackTach Instance Hours Report')
    parser.add_argument('--period_length',
                        choices=['hour', 'day'],
                        default='day')
    parser.add_argument('--utcdatetime',
                        help="Override the end time used to generate report.",
                        type=valid_datetime,
                        default=None)
    parser.add_argument('--store',
                        help="If set to true, report will be stored. "
                        "Otherwise, it will just be printed",
                        default=False,
                        action="store_true")
    args = parser.parse_args()

    stacklog.set_default_logger_name('instance_hours')
    parent_logger = stacklog.get_logger('instance_hours', is_parent=True)
    log_listener = stacklog.LogListener(parent_logger)
    log_listener.start()

    tenant_manager = TenantManager()
    report = InstanceHoursReport(tenant_manager,
                                 time=args.utcdatetime,
                                 period_length=args.period_length)

    report.compile_hours()
    json = report.generate_json()

    if not args.store:
        print json
    else: