def run(self):
    m.init(self.config['datanommer.sqlalchemy.url'])
    config = self.config

    if config.get('topic', None):
        # Count messages per topic, optionally restricted to one category.
        if config.get('category', None):
            query = m.session.query(
                m.Message.topic,
                func.count(m.Message.topic),
            ).filter(
                m.Message.category == config.get('category')
            )
        else:
            query = m.session.query(
                m.Message.topic,
                func.count(m.Message.topic),
            )
        query = query.group_by(m.Message.topic)
    else:
        # Otherwise count messages per category.
        if config.get('category', None):
            query = m.session.query(
                m.Message.category,
                func.count(m.Message.category),
            ).filter(
                m.Message.category == config.get('category')
            )
        else:
            query = m.session.query(
                m.Message.category,
                func.count(m.Message.category),
            )
        query = query.group_by(m.Message.category)

    results = query.all()

    if config.get('topic', None):
        for topic, count in results:
            self.log.info("%s has %s entries" % (topic, count))
    else:
        for category, count in results:
            self.log.info("%s has %s entries" % (category, count))

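For quick experimentation outside the command class, the grouped count above can be run against a standalone datanommer session. A minimal sketch, assuming a local sqlite database at a made-up path:

import datanommer.models as m
from sqlalchemy import func

m.init('sqlite:///datanommer.db')  # hypothetical database URL
rows = m.session.query(
    m.Message.category, func.count(m.Message.category)
).group_by(m.Message.category).all()

for category, count in rows:
    print("%s has %s entries" % (category, count))
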
def setUp(self): uri = "sqlite:///%s" % filename self.config = { 'datanommer.sqlalchemy.url': uri, 'logging': { 'version': 1 } } self.config.update(fedmsg.config.load_config()) m.session = scoped_session(m.maker) m.init(uri=uri, create=True)
def run(self):
    m.init(self.config['datanommer.sqlalchemy.url'])
    config = self.config

    if config.get('topic', None):
        queries = [
            m.Message.query.filter(m.Message.topic == config.get('topic'))
        ]
    elif config.get('category', None):
        queries = [
            m.Message.query.filter(
                m.Message.category == config.get('category'))
        ]
    elif not config.get('overall', False):
        # If no args..
        fedmsg.meta.make_processors(**config)
        categories = [p.__name__.lower() for p in fedmsg.meta.processors]
        queries = [
            m.Message.query.filter(m.Message.category == category)
            for category in categories
        ]
    else:
        # Show only the single latest message, regardless of type.
        queries = [m.Message.query]

    # Order and limit to the latest.
    queries = [
        q.order_by(m.Message.timestamp.desc()).limit(1)
        for q in queries
    ]

    def formatter(key, val):
        if config.get('timestamp', None) and config.get('human', None):
            return pretty_dumps(str(val.timestamp))
        elif config.get('timestamp', None):
            return pretty_dumps(time.mktime(val.timestamp.timetuple()))
        elif config.get('timesince', None) and config.get('human', None):
            return pretty_dumps(
                str(datetime.datetime.now() - val.timestamp))
        elif config.get('timesince', None):
            timedelta = datetime.datetime.now() - val.timestamp
            return pretty_dumps(
                str((timedelta.days * 86400) + timedelta.seconds))
        else:
            return "{%s: %s}" % (pretty_dumps(key), pretty_dumps(val))

    results = []
    for result in sum([query.all() for query in queries], []):
        results.append(formatter(result.category, result))

    self.log.info('[%s]' % ','.join(results))

def run(self):
    m.init(self.config['datanommer.sqlalchemy.url'])
    config = self.config

    query = m.Message.query
    if config.get('before', None):
        query = query.filter(m.Message.timestamp <= config.get('before'))
    if config.get('since', None):
        query = query.filter(m.Message.timestamp >= config.get('since'))

    results = query.all()
    self.log.info(pretty_dumps(results))

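The same before/since window can be applied outside the command class as well; the database URL and the date range below are invented for illustration:

import datetime
import datanommer.models as m

m.init('sqlite:///datanommer.db')  # hypothetical database URL
since = datetime.datetime(2013, 1, 1)
before = datetime.datetime(2013, 2, 1)

query = m.Message.query
query = query.filter(m.Message.timestamp >= since)
query = query.filter(m.Message.timestamp <= before)
print(len(query.all()))
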
def upgrade():
    engine = op.get_bind().engine
    m.init(engine=engine)

    # git.branch
    branch = m.Message.topic.like(u'%.git.branch.%')
    for msg in m.Message.query.filter(branch).yield_per(100):
        prefix, suffix = msg.topic.split(u'.git.branch.')

        # Fix topic
        msg.topic = prefix + '.git.branch'

        # Fix message contents
        message = msg.msg
        message['name'] = '.'.join(suffix.split('.')[0:-1])
        message['branch'] = suffix.split('.')[-1]
        msg.msg = message

        # Drop cert and sig
        msg.certificate = None
        msg.signature = None

        m.session.add(msg)

    # git.lookaside.*.new
    lookaside = m.Message.topic.like(u'%.git.lookaside.%.new')
    for msg in m.Message.query.filter(lookaside).yield_per(100):
        prefix, suffix = msg.topic.split(u'.git.lookaside.')

        # Fix topic
        msg.topic = prefix + '.git.lookaside.new'

        # Drop cert and sig
        msg.certificate = None
        msg.signature = None

        m.session.add(msg)

    # git.receive
    receive = m.Message.topic.like(u'%.git.receive.%')
    for msg in m.Message.query.filter(receive).yield_per(100):
        prefix, suffix = msg.topic.split(u'.git.receive.')

        # Fix topic
        msg.topic = prefix + '.git.receive'

        # Fix message contents
        message = msg.msg
        message['commit']['repo'] = '.'.join(suffix.split('.')[0:-1])
        msg.msg = message

        # Drop cert and sig
        msg.certificate = None
        msg.signature = None

        m.session.add(msg)

    m.session.commit()

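To make the topic surgery above concrete, this is what the git.branch rewrite does to a single topic string (the topic value itself is invented for the example):

topic = u'org.fedoraproject.prod.git.branch.valgrind.f19'  # made-up example
prefix, suffix = topic.split(u'.git.branch.')

new_topic = prefix + '.git.branch'        # org.fedoraproject.prod.git.branch
name = '.'.join(suffix.split('.')[0:-1])  # 'valgrind'  -> message['name']
branch = suffix.split('.')[-1]            # 'f19'       -> message['branch']
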
def upgrade(): """ This takes a *really* long time. Like, hours. """ config_paths = context.config.get_main_option('fedmsg_config_dir') filenames = fedmsg.config._gather_configs_in(config_paths) config = fedmsg.config.load_config(filenames=filenames) make_processors(**config) engine = op.get_bind().engine m.init(engine=engine) for msg in _page(m.Message.query.order_by(m.Message.timestamp)): print "processing", msg.timestamp, msg.topic if msg.users and msg.packages: continue changed = False if not msg.users: new_usernames = msg2usernames(msg.__json__(), **config) print "Updating users to %r" % new_usernames changed = changed or new_usernames for new_username in new_usernames: new_user = m.User.get_or_create(new_username) msg.users.append(new_user) if not msg.packages: new_packagenames = msg2packages(msg.__json__(), **config) print "Updating packages to %r" % new_packagenames changed = changed or new_usernames for new_packagename in new_packagenames: new_package = m.Package.get_or_create(new_packagename) msg.packages.append(new_package) if changed and random.random() < 0.01: # Only save if something changed.. and only do it every so often. # We do this so that if we crash, we can kind of pick up where # we left off. But if we do it on every change: too slow. print " * Saving!" m.session.commit() m.session.commit()
def run(self):
    m.init(self.config['datanommer.sqlalchemy.url'], create=True)

def __init__(self):
    # m.init(uri='postgresql://*****:*****@localhost/datanommer')
    m.init('postgresql+psycopg2://datanommer:datanommer@localhost:5432/datanommer')
    self.graph = Graph()

import flask
import hashlib
import os
import shelve

import charts

import sqlalchemy
import datanommer.models as m

m.init('postgres://*****:*****@localhost/datanommer')

here = os.path.abspath(os.path.dirname(__file__))
datadir = os.path.join(here, '..', 'data')
gravatar_dir = os.path.join(datadir, 'gravatars')
montage_dir = os.path.join(datadir, 'montage')

app = flask.Flask(__name__)


def make_gravatar(username):
    email = username + "@fedoraproject.org"
    digest = hashlib.md5(email).hexdigest()
    gravatar = "http://www.gravatar.com/avatar/%s" % digest
    gravatar += "?s=128"
    return gravatar


@app.route('/')
def index():
    rows = 5
    columns = 12

def init():
    # Load stuff from /etc/fedmsg.d/ into a dict
    config = fedmsg.config.load_config(None, [])
    m.init(config['datanommer.sqlalchemy.url'])

from pkg_resources import get_distribution

app = flask.Flask(__name__)
app.config.from_object('datagrepper.default_config')
app.config.from_envvar('DATAGREPPER_CONFIG')

app.config['CORS_DOMAINS'] = list(
    map(re.compile, app.config.get('CORS_DOMAINS', [])))
app.config['CORS_HEADERS'] = list(
    map(re.compile, app.config.get('CORS_HEADERS', [])))

# Read in the datanommer DB URL from /etc/fedmsg.d/ (or a local fedmsg.d/)
fedmsg_config = fedmsg.config.load_config()
fedmsg.meta.make_processors(**fedmsg_config)

# Initialize a datanommer session.
dm.init(fedmsg_config['datanommer.sqlalchemy.url'])

import datagrepper.widgets


@app.context_processor
def inject_variable():
    """ Inject some global variables into all templates """
    extras = {
        'models_version': get_distribution('datanommer.models').version,
        'grepper_version': get_distribution('datagrepper').version,
    }

    style = {
        'message_bus_link': 'http://fedmsg.com',

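The compiled CORS patterns above are presumably matched against request headers elsewhere in datagrepper; a rough sketch of that kind of check, with a made-up Origin value:

origin = 'https://apps.fedoraproject.org'  # illustrative header value
allowed = any(
    pattern.match(origin) for pattern in app.config['CORS_DOMAINS']
)
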
import calendar
from datetime import (
    datetime,
    timedelta,
)

import pygal
import pygal.style

from sqlalchemy.orm.exc import NoResultFound

import datanommer.models as m

m.init('postgres://*****:*****@localhost/datanommer')

title_template = 'Fedora Development Activity for %s@fedoraproject.org'


def message_count(user, start):
    end = start + timedelta(days=1)

    if not user:
        return 0

    return len([
        msg for msg in user.messages
        if msg.timestamp >= start and msg.timestamp < end
    ])


def make_chart(username, style='default', **args):
    if isinstance(style, list):
        style = style[0]

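make_chart() is cut off above; purely as a sketch of how message_count() could feed a pygal series. The username, the date range, and the assumption that the User model exposes a `name` column and the shared `query` property are all illustrative, not taken from the original body:

# Hypothetical glue code; reuses the imports from the snippet above.
try:
    user = m.User.query.filter(m.User.name == 'ralph').one()  # assumed model API
except NoResultFound:
    user = None

start = datetime(2013, 1, 1)
values = [message_count(user, start + timedelta(days=i)) for i in range(7)]

chart = pygal.Line()
chart.add('messages', values)
svg = chart.render()
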