def authenticate_from_oauth(sakuya_db):
    oauth_token = request.query.get('access_token')
    if oauth_token is not None:
        oauth_config = get_config('webapp')['oauth']
        try:
            f = urllib2.urlopen(oauth_config['url'] + '/resource.php',
                                urllib.urlencode({'oauth_token': oauth_token,
                                                  'getinfo': True}), 5)
            user = f.read()
            f.close()
            user = json.loads(user)
            for k in user.keys():
                if isinstance(user[k], unicode):
                    user[k] = user[k].encode('utf-8')
            row = sakuya_db.query(Users).filter_by(username=user['username']).first()
            if row is None:
                row = Users()
                row.username = user['username']
                row.truename = user['chinese_name']
                row.email = user['email']
                sakuya_db.add(row)
                sakuya_db.commit()
            util.set_cookie('auth', (row.id, row.username, row.truename,
                                     row.mobile, row.email))
            return True
        except Exception, e:
            print 'Failed to log in via OAuth.'
            traceback.print_exc()
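
# Illustration only: the shape of the JSON document authenticate_from_oauth expects
# back from resource.php, inferred from the keys it reads above. All values are
# placeholders, not real data.
example_oauth_user = {
    'username': 'alice',
    'chinese_name': u'示例',
    'email': 'alice@example.com',
}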

def process_user_events():
    logging.info("Processing user events (%d users)." % len(user_events))

    # prepare email interface
    config = get_config("webapp")
    mail_config = {}
    mail_config["mail.on"] = config["turbomail"]["enable"]
    mail_config["mail.manager"] = config["turbomail"]["manager"]
    mail_config["mail.transport"] = config["turbomail"]["transport"]
    mail_config["mail.smtp.server"] = config["turbomail"]["server"]
    interface.start(mail_config)

    for id, info in user_events.iteritems():
        if len(info["events"]) <= 5:
            for event in info["events"]:
                title, plain, rich = format_single(event)
                send_email(config["turbomail"]["sender"], info["truename"],
                           info["email_list"], title, plain, rich)
        else:
            title, plain, rich = format_batch(info["events"])
            send_email(config["turbomail"]["sender"], info["truename"],
                       info["email_list"], title, plain, rich)

    if no_data:
        send_email(
            config["turbomail"]["sender"],
            "jizhang",
            ["*****@*****.**", "*****@*****.**"],
            "no data",
            "no data",
            "no data",
        )
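
# Illustration only: process_user_events assumes a module-level user_events mapping
# roughly shaped like this (keys inferred from the code above); the id and addresses
# are placeholders, and the event items are whatever format_single/format_batch accept.
user_events_example = {
    42: {
        "truename": "Example User",
        "email_list": ["user@example.com"],
        "events": [],
    },
}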

def setup_logging(job, daemon=False, verbose=False):
    log_folder = '%s/%s' % (get_config('webapp')['job_logs_directory'], job)
    if not os.path.exists(log_folder):
        os.mkdir(log_folder)
    log_filename = '%s/log' % log_folder

    logger = logging.getLogger()
    if verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    handlers = []
    if daemon:
        handlers.append(logging.handlers.TimedRotatingFileHandler(
            filename=log_filename, when='midnight'))
    else:
        handlers.append(logging.FileHandler(
            filename='%s.%s' % (log_filename, time.strftime('%Y-%m-%d'))))
        handlers.append(logging.StreamHandler())

    for handler in handlers:
        if verbose:
            handler.setLevel(logging.DEBUG)
        else:
            handler.setLevel(logging.INFO)
        handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
        logger.addHandler(handler)
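
# A minimal usage sketch for setup_logging above, assuming the sakuya package is
# importable and its 'webapp' config provides a writable job_logs_directory.
# The job name 'example_job' is invented for illustration.
import logging
from sakuya.lib import util

util.setup_logging('example_job', daemon=False, verbose=True)
logging.info('Job started.')  # written to the dated log file and the console handler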

def haopan(sakuya_db):
    try:
        dt = datetime.datetime.strptime(request.forms['time'], '%Y-%m-%d %H:%M')
        data = json.loads(request.forms['data'])
        if not isinstance(data[0], list) or len(data[0]) < 6:
            raise ValueError
    except Exception:
        return util.output('error', msg='Invalid parameters.')

    dtstr = dt.strftime('%Y-%m-%d %H:%M')
    with open(get_config('webapp')['hp_raw_data'], 'a') as f:
        f.write('time: %s\n' % dtstr)
        f.write('data:\n')
        for item in data:
            f.write('%s\n' % str(item))
        f.write('\n')

    socket.send(msgpack.packb({
        'time': dtstr,
        'data': data
    }))
    return util.output('ok', msg='%s, %d items received.' % (dtstr, len(data)))

def log(chart, category):
    with open(get_config('webapp')['chart_ack_log'], 'a') as f:
        l = u'[%s] user: %s, chart: %d-%s, category: %d-%s\n' % \
            (datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
             auth.get_user()['username'],
             chart.id, chart.name, category.id, category.name)
        f.write(l.encode('utf-8'))

def get_belongto(ip):
    belongto = None
    try:
        #f = urllib2.urlopen(get_config('webapp')['zeus_api_url'] + '/item/get_belongto_by_ip?ip=' + ip, None, 3)
        f = urllib2.urlopen(get_config('webapp')['zeus_api_url'] + '/item/get_label_by_ip?ip=' + ip, None, 3)
        belongto = f.read()
    except Exception, e:
        logging.exception('Failed to get belongto for ip %s' % ip)
    return belongto

def process_exists(name):
    fullpath = '%s/%s' % (get_config('webapp')['flocks_dir'], name)
    f = open(fullpath, 'w')
    try:
        fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        logging.error('Process exists: cannot lock file %s' % fullpath)
        exit(2)
    else:
        # keep the locked file object alive for the lifetime of the process
        globals()['_process_exists_flock'] = f
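
# The same flock-based single-instance guard as process_exists above, reduced to a
# standalone sketch: the lock path is hard-coded here instead of coming from the
# flocks_dir config key, and the caller keeps the file object to hold the lock.
import fcntl
import sys

def ensure_single_instance(lock_path):
    f = open(lock_path, 'w')
    try:
        fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        sys.stderr.write('Another instance holds %s; exiting.\n' % lock_path)
        sys.exit(2)
    return f  # must stay referenced, or the lock is released when garbage collected

_lock = ensure_single_instance('/tmp/example_job.lock')  # illustrative path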

def __init__(self):
    self.session = sessionmaker(bind=engine_sakuya_db)()
    self.config = get_config('webapp')
    mail_config = {}
    mail_config['mail.on'] = self.config['turbomail']['enable']
    mail_config['mail.manager'] = self.config['turbomail']['manager']
    mail_config['mail.transport'] = self.config['turbomail']['transport']
    mail_config['mail.smtp.server'] = 'smtp.126.com'
    mail_config['mail.smtp.username'] = '******'
    mail_config['mail.smtp.password'] = '******'
    interface.start(mail_config)

def __init__(self, args):
    self.args = args
    util.setup_logging('suite_basic', True)
    signal.signal(signal.SIGTERM, self.signal_handler)
    signal.signal(signal.SIGINT, self.signal_handler)
    with open(get_config('webapp')['suite_hosts']) as f:
        self.groups = yaml.load(f.read())
    self.cmdgen = cmdgen.CommandGenerator()

def get_charts(no_cache=False):
    global _charts, _charts_expiry
    #now = datetime.datetime.now()
    #if now > _charts_expiry or no_cache:
    #    _charts_expiry = now + datetime.timedelta(minutes=5)
    try:
        with file(get_config('webapp')['hp_aggregate_charts']) as f:
            _charts = msgpack.unpackb(f.read())
    except Exception:
        pass
    if not isinstance(_charts, dict):
        _charts = {}
    return _charts

def get_login_info():
    oauth_config = get_config('webapp')['oauth']
    data = {
        'client_id': oauth_config['client'],
        'response_type': 'code',
        'curl': True
    }
    try:
        f = urllib2.urlopen(oauth_config['url'] + '/authorize.php',
                            urllib.urlencode(data), 3)
        code = f.read()
        f.close()
        code = json.loads(code)
    except Exception, e:
        code = {'code': ''}

def main():
    socket = zmq.Socket(zmq.Context.instance(), zmq.PUSH)
    socket.connect(get_config('webapp')['rule_logger_endpoint'])
    msg = {
        'time': util.millitime(),
        'datasource': 'access_log',
        'event': {'upstream_addr': '10.10.5.244'},
        'rules': [1, 2]
    }
    socket.send(msgpack.packb(msg))
    msg['rules'] = [1]
    socket.send(msgpack.packb(msg))
    msg['event'] = {'upstream_addr': '1.2.3.5'}
    msg['rules'] = [2]
    socket.send(msgpack.packb(msg))
    socket.send(msgpack.packb(msg))
    msg['time'] += 61000
    socket.send(msgpack.packb(msg))

def run(self):
    util.setup_logging('hp_aggregate', True)
    signal.signal(signal.SIGTERM, self.signal_handler)
    signal.signal(signal.SIGINT, self.signal_handler)

    self.socket = zmq.Socket(zmq.Context.instance(), zmq.PULL)
    self.socket.bind(get_config('webapp')['hp_aggregate_bind'])

    self.sakuya_db = sessionmaker(bind=engine_sakuya_db)()
    self.next_dt = datetime.datetime.now()
    self.data = []

    logging.info('Start looping...')
    self.loop()
    self.socket.close()

def run(self):
    util.setup_logging('rule_logger', True, self.args.verbose)
    signal.signal(signal.SIGTERM, self.signal_handler)
    signal.signal(signal.SIGINT, self.signal_handler)

    self.socket = zmq.Socket(zmq.Context.instance(), zmq.PULL)
    self.socket.bind(get_config('webapp')['rule_logger_endpoint'])

    self.sakuya_db = sessionmaker(bind=engine_sakuya_db)()
    self.init_rule_methods()
    self.datasource_rules = {}
    self.next_time = {}

    logging.info('Start looping...')
    self.loop()
    self.socket.close()

import json
import urllib2
import time
import datetime
import yaml
import errno
import signal
import logging
from contextlib import closing

from pysnmp.entity.rfc3413.oneliner import cmdgen

from sakuya.config import get_config
from sakuya.lib import util

STATS_INTERVAL = 60  # 60"
SUITE_DATA_API = get_config('webapp')['base_url'] + '/api/suite-data'


class Job(object):

    interrupted = False

    def __init__(self, args):
        self.args = args
        util.setup_logging('suite_basic', True)
        signal.signal(signal.SIGTERM, self.signal_handler)
        signal.signal(signal.SIGINT, self.signal_handler)
        with open(get_config('webapp')['suite_hosts']) as f:
            self.groups = yaml.load(f.read())

# -*- coding: utf-8 -*-
import fcntl
import os
import json
import logging
import logging.handlers
import time
import datetime

from bottle import request, response

from sakuya.config import get_config

COOKIE_SECRET = get_config('webapp')['cookie_secret']


def millitime():
    return int(round(time.time() * 1000))


def timestamp(dt=None):
    if dt is None:
        dt = datetime.datetime.now()
    return int(round(time.mktime(dt.timetuple())))


def output(status, **kwargs):
    return json.dumps(dict(status=status, **kwargs))


def get_cookie(key):
    return request.get_cookie(key, secret=COOKIE_SECRET)


def set_cookie(key, value):
    return response.set_cookie(key, value, secret=COOKIE_SECRET, path="/")
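
# Illustration only: how the helpers above compose inside a bottle route. The app
# object and the '/api/ping' path are invented for this sketch; output(), set_cookie()
# and the time helpers are the functions defined above.
from bottle import Bottle
from sakuya.lib import util

app = Bottle()

@app.get('/api/ping')
def ping():
    util.set_cookie('last_ping', str(util.millitime()))
    return util.output('ok', ts=util.timestamp())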

import urllib
import urllib2
import os
import shutil
import argparse
import msgpack
import time
import datetime
import logging

from sqlalchemy.orm import sessionmaker

from sakuya.lib import haopan, util
from sakuya.config import get_config
from sakuya.models import engine_sakuya_db
from sakuya.models.sakuya_db import Charts

CHARTFILE = get_config('webapp')['hp_aggregate_charts']
CATEGORY_ID = 100


class Job:

    def run(self):
        util.setup_logging('hp_create')
        self.sakuya_db = sessionmaker(bind=engine_sakuya_db)()

        data = []
        for i in haopan.PRODS:
            for j in haopan.CHARTS:
                for k in haopan.CITIES:
                    for l in haopan.CHANNELS:

        sakuya_db.rollback()
        traceback.print_exc()
        return util.output('error', msg='Internal error.')

    try:
        chart.api_ip = util.ip2long(request['REMOTE_ADDR'])
        chart.api_ts = util.timestamp()
        sakuya_db.commit()
    except Exception, e:
        sakuya_db.rollback()

    return util.output('ok')


socket = zmq.Socket(zmq.Context.instance(), zmq.PUSH)
socket.connect(get_config('webapp')['hp_aggregate_connect'])


@app.post('/api/haopan')
def haopan(sakuya_db):
    try:
        dt = datetime.datetime.strptime(request.forms['time'], '%Y-%m-%d %H:%M')
        data = json.loads(request.forms['data'])
        if not isinstance(data[0], list) or len(data[0]) < 6:
            raise ValueError
    except Exception:
        return util.output('error', msg='Invalid parameters.')

    dtstr = dt.strftime('%Y-%m-%d %H:%M')
    with open(get_config('webapp')['hp_raw_data'], 'a') as f:

        'grant_type': 'authorization_code',
        'code': code['code']
    }


def redirect_to_oauth():
    login_info = get_login_info()
    return redirect('%s/token.php?%s' % (login_info.pop('url'),
                                         urllib.urlencode(login_info)))


def login(func):
    def wrapper(*args, **kwargs):
        if get_user() is None:
            return abort(403)
        return func(*args, **kwargs)
    return wrapper


_roles = get_config('webapp')['roles']
for k, v in _roles.iteritems():
    if k == 'admin':
        continue
    v.extend(_roles['admin'])


def role(*roles):
    def decorator(func):
        def wrapper(*args, **kwargs):
            if not is_role(*roles):
                return abort(403)
            return func(*args, **kwargs)
        return wrapper
    return decorator


def is_role(*roles):
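
# Illustration only: guarding a route with the login and role decorators defined
# above. The route path is invented, the Bottle app stands in for the project's
# application, and importing the decorators from sakuya.lib.auth is an assumption
# based on how auth.get_user() is referenced elsewhere in these snippets.
from bottle import Bottle
from sakuya.lib import auth, util

app = Bottle()  # stands in for the project's bottle application

@app.get('/api/admin-only')
@auth.login
@auth.role('admin')
def admin_only():
    return util.output('ok', user=auth.get_user()['username'])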

# -*- coding: utf-8 -*-
from bottle.ext import sqlalchemy
from sqlalchemy import create_engine

from sakuya.config import get_config
from sakuya.models import sakuya_db, db_monitor

URI_FORMAT = '%(adapter)s://%(username)s:%(password)s@%(host)s:%(port)d/%(database)s?charset=utf8&use_unicode=1'

db_config = get_config('database')

engine_sakuya_db = create_engine(URI_FORMAT % db_config['sakuya_db'],
                                 echo=db_config['sakuya_db']['echo'],
                                 pool_recycle=3600)
plugin_sakuya_db = sqlalchemy.Plugin(engine_sakuya_db,
                                     sakuya_db.Base.metadata,
                                     keyword='sakuya_db',
                                     use_kwargs=True)

engine_db_monitor = create_engine(URI_FORMAT % db_config['db_monitor'],
                                  echo=db_config['db_monitor']['echo'],
                                  pool_recycle=3600)
plugin_db_monitor = sqlalchemy.Plugin(engine_db_monitor,
                                      db_monitor.Base.metadata,
                                      keyword='db_monitor',
                                      use_kwargs=True)


def init_db():
    sakuya_db.Base.metadata.create_all(bind=engine_sakuya_db)


def drop_db():
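
# Illustration only: the per-connection keys that URI_FORMAT and the create_engine
# calls above require from the 'database' config. All values here are placeholders;
# the adapter string in particular is a guess (the charset parameter suggests MySQL).
database_config_example = {
    'sakuya_db': {
        'adapter': 'mysql',
        'username': 'sakuya',
        'password': 'secret',
        'host': '127.0.0.1',
        'port': 3306,  # %(port)d requires an integer
        'database': 'sakuya',
        'echo': False,
    },
    'db_monitor': {
        # same keys as sakuya_db
    },
}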

def get_zk():
    global _zk
    if _zk is None:
        _zk = KazooClient(get_config('webapp')['zookeeper_hosts'])
        _zk.start()
    return _zk
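
# A brief usage sketch for get_zk above. ensure_path and get_children are standard
# KazooClient methods; the '/sakuya/example' znode path is invented for illustration.
zk = get_zk()
zk.ensure_path('/sakuya/example')
children = zk.get_children('/sakuya')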