        # Tail of a loop over worker-table entries (the enclosing def/for
        # begins before this chunk): wait for the child process to exit,
        # then clear its slot in the shared process table.
        process = processes[pname]['process']
        if process is not None:
            # Block until the child terminates.
            process.join()
        # Reset the slot so the table no longer references a dead worker.
        processes[pname]['process'] = None
        processes[pname]['pid'] = 0


def kill_time(signal, frame):
    """Signal handler for SIGINT/SIGTERM: stop the run loop and all workers.

    :param signal: delivered signal number (unused; note it shadows the
        ``signal`` module inside this function).
    :param frame: interrupted stack frame (unused).
    """
    global RUNNING
    RUNNING = False
    stop_all()


if __name__ == '__main__':
    # Parent-process setup: central logger plus a listener that drains
    # log records produced by child processes.
    logger = _get_parent_logger()
    log_listener = stacklog.LogListener(logger)
    log_listener.start()

    # Shared (multiprocessing.Manager-backed) table of worker processes.
    manager = Manager()
    create_proc_table(manager)

    # NOTE (apmelton)
    # Close the connection before spinning up the child process,
    # otherwise the child process will attempt to use the connection
    # the parent process opened up to get/create the deployment.
    close_connection()

    # Install shutdown handlers so SIGINT/SIGTERM stop workers cleanly.
    signal.signal(signal.SIGINT, kill_time)
    signal.signal(signal.SIGTERM, kill_time)

    ver = "(Version: %s )" % version.get_version()
def make_and_start_verifier(exchange):
    """Create the verifier for *exchange* and run it (in a child process).

    Gotta create it and run it this way so things don't get
    lost when the process is forked.

    :param exchange: topic name; only "nova" and "glance" are handled.
        Any other value leaves ``verifier`` as None and ``verifier.run()``
        will raise AttributeError.
    """
    verifier = None
    if exchange == "nova":
        # Nova verification can optionally reconcile mismatches.
        reconcile = verifier_config.reconcile()
        reconciler = None
        if reconcile:
            reconciler = _load_nova_reconciler()
        verifier = nova_verifier.NovaVerifier(verifier_config,
                                              reconciler=reconciler)
    elif exchange == "glance":
        verifier = glance_verifier.GlanceVerifier(verifier_config)

    # Runs the verifier's main loop; does not return until it stops.
    verifier.run()


# --- parent-process startup ---
verifier_config.load()

# Listener that collects log records queued by the child verifiers.
log_listener = stacklog.LogListener(_get_parent_logger())
log_listener.start()

# Fork one verifier process per configured exchange/topic.
for exchange in verifier_config.topics().keys():
    process = Process(target=make_and_start_verifier, args=(exchange, ))
    process.start()
    processes.append(process)

if len(processes) > 0:
    # Only pause parent process if there are children running.
    # Otherwise just end...
    signal.signal(signal.SIGINT, kill_time)
    signal.signal(signal.SIGTERM, kill_time)
    # Sleep until a signal arrives (kill_time handles shutdown).
    signal.pause()
                    # Continuation of a parser.add_argument('--ums', ...)
                    # call that begins before this chunk.
                    help="Use query to match UMS, "
                         "period length of 'day' required.",
                    action='store_true')
parser.add_argument('--ums-offset',
                    help="UMS' fencepost offset in seconds. Default: 4 days",
                    type=int,
                    default=DEFAULT_UMS_OFFSET)
args = parser.parse_args()

# UMS matching is only defined for daily audit periods; bail out early.
# NOTE(review): Python 2 print statement — this file is Python 2 only.
if args.ums and args.period_length != 'day':
    print "UMS query can only be used with period_length of 'day'."
    sys.exit(0)

# Parent logger plus a listener for records queued by worker processes.
stacklog.set_default_logger_name('nova_usage_audit')
parent_logger = stacklog.get_logger('nova_usage_audit', is_parent=True)
log_listener = stacklog.LogListener(parent_logger)
log_listener.start()

if args.reconcile:
    # Reconciler is configured from a JSON file given on the command line.
    with open(args.reconciler_config) as f:
        reconciler_config = json.load(f)
    reconciler = Reconciler(reconciler_config)

# Audit anchor time: explicit --utcdatetime if given, otherwise "now" (UTC).
if args.utcdatetime is not None:
    time = args.utcdatetime
else:
    time = datetime.datetime.utcnow()

# Audit the most recently completed period ending at/before `time`.
start, end = usage_audit.get_previous_period(time, args.period_length)

# NOTE: call continues past this chunk (remaining keyword args follow).
summary, details = audit_for_period(start, end, ums=args.ums,
from django.conf.urls import patterns, url

from stacktach import stacklog

# Module-level side effect: configure the web logger and start a listener
# that drains queued log records as soon as this URLconf is imported.
stacklog.set_default_logger_name('stacktach-web')
web_logger = stacklog.get_logger('stacktach-web')
web_logger_listener = stacklog.LogListener(web_logger)
web_logger_listener.start()

# URL patterns for the HTML UI; each view is keyed by deployment id.
# (String view references — the old pre-Django-1.10 style used throughout
# this file.)
web_urls = (
    url(r'^$', 'stacktach.views.welcome', name='welcome'),
    url(r'^(?P<deployment_id>\d+)/$', 'stacktach.views.home', name='home'),
    url(r'^(?P<deployment_id>\d+)/details/(?P<column>\w+)/(?P<row_id>\d+)/$',
        'stacktach.views.details', name='details'),
    url(r'^(?P<deployment_id>\d+)/search/$',
        'stacktach.views.search', name='search'),
    url(r'^(?P<deployment_id>\d+)/expand/(?P<row_id>\d+)/$',
        'stacktach.views.expand', name='expand'),
    url(r'^(?P<deployment_id>\d+)/latest_raw/$',
        'stacktach.views.latest_raw', name='latest_raw'),
    url(r'^(?P<deployment_id>\d+)/instance_status/$',
        'stacktach.views.instance_status', name='instance_status'),
)

# URL patterns for the "stacky" CLI/API endpoints.
# NOTE: this tuple is not closed here — it continues past this chunk.
stacky_urls = (
    url(r'stacky/deployments/$', 'stacktach.stacky_server.do_deployments'),