def FromMeta(self, artist, album, track_number, song):
    """Populate a track from its meta data.

    Looks the track up by (artist, album, number, song); if it is not in
    the database yet, initializes a fresh in-memory record with zeroed
    play counters and the current creation time.
    """
    # Normalize trailing whitespace on the textual fields
    artist = artist.rstrip()
    album = album.rstrip()
    song = song.rstrip()

    lookup = ('select * from tracks where '
              'artist=%s and album=%s '
              'and number=%d and song=%s;'
              % (sql.FormatSqlValue('artist', artist),
                 sql.FormatSqlValue('album', album),
                 track_number,
                 sql.FormatSqlValue('song', song)))
    self.persistant = self.db.GetOneRow(lookup)

    if not self.persistant:
        # Unknown track -- start a fresh record with zeroed counters
        self.persistant = {
            'artist': artist,
            'album': album,
            'number': track_number,
            'song': song,
            'plays': 0,
            'skips': 0,
            'creation_time': datetime.datetime.now(),
        }
def WriteOneRow(self, table, key_col, dict):
    """WriteOneRow -- use a dictionary to write a row to the specified table

    If a row whose key_col equals dict[key_col] already exists it is
    updated in place, otherwise a new row is inserted.  Commits
    immediately.
    NOTE(review): values are interpolated straight into the SQL string,
    so this is only safe for trusted input.
    """
    # Probe for an existing row with this key.  Failures (for example the
    # table not existing yet) are deliberately swallowed and treated as
    # "no existing row", which routes us to the insert branch below.
    row_count = 0
    try:
        cursor = self.db_connection.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute('select %s from %s where %s = "%s"'
                       % (key_col, table, key_col, dict[key_col]))
        row_count = cursor.rowcount
        cursor.close()
    except:
        pass

    if dict.has_key(key_col) and row_count > 0:
        # Row exists: build an update statement covering every column
        vals = []
        for col in dict:
            val = '%s=%s' % (col, sql.FormatSqlValue(col, dict[col]))
            vals.append(val)
        sqlcmd = ('update %s set %s where %s="%s";'
                  % (table, ','.join(vals), key_col, dict[key_col]))
    else:
        # No row yet: build an insert.  Relies on dict.keys() and plain
        # iteration enumerating the columns in the same order.
        vals = []
        for col in dict:
            val = sql.FormatSqlValue(col, dict[col])
            vals.append(val)
        sqlcmd = ('insert into %s (%s) values(%s);'
                  % (table, ','.join(dict.keys()), ','.join(vals)))

    self.db_connection.query(sqlcmd)
    self.db_connection.query('commit;')
def ResolveGroupMembers(cursor, usersliststring, table, window_size):
    """Expand a space separated list of usernames and g:<group> entries.

    'g:all' expands to every user active in `table` within the last
    window_size days; other groups are resolved via GetGroupMembers.
    Falls back to ['mikalstill'] when nothing resolves.
    """
    resolved = []

    # Only consider activity inside the reporting window
    window_start = datetime.datetime.now() - datetime.timedelta(days=1) * window_size

    for entry in usersliststring.lstrip(' ').split(' '):
        if not entry.startswith('g:'):
            # A plain username, take it as-is
            resolved.append(entry)
            continue

        group_name = entry.split(':')[1]
        if group_name == 'all':
            # The pseudo group "all" means everyone active in the window
            cursor.execute('select distinct(username), max(day) from %s '
                           'where day > date(%s) group by username;'
                           % (table,
                              sql.FormatSqlValue('timestamp', window_start)))
            resolved.extend(row['username'] for row in cursor)
        else:
            resolved.extend(GetGroupMembers(cursor, group_name))

    # Default when the list resolved to nothing at all
    if not resolved:
        return ['mikalstill']
    return resolved
def Reviews(db, component):
    """Fetch gerrit review approvals for a component and store them.

    Runs a (database cached, 300 second horizon) gerrit query over ssh,
    then inserts one row into the reviews table per approval on each
    patch set of each change.
    """
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    for l in dbcachingexecute.Execute(db, time.time() - 300,
                                      'gerrit_query_approvals_json',
                                      ('ssh review.openstack.org gerrit query '
                                       'project:%s --all-approvals --patch-sets '
                                       '--format JSON'),
                                      component, cleanup=True):
        try:
            d = json.loads(l)
        except:
            # Not every output line is JSON (gerrit emits a stats line);
            # just skip anything unparsable
            continue

        for ps in d.get('patchSets', {}):
            for review in ps.get('approvals', []):
                # Deliberately leave the timezone alone here so its consistant with
                # reports others generate.
                updated_at = datetime.datetime.fromtimestamp(
                    review['grantedOn'])
                print('%s review by %s at %d for %s'
                      % (d['id'], review['by'].get('username', 'unknown'),
                         review['grantedOn'], component))
                # insert ignore makes re-scrapes idempotent
                cursor.execute('insert ignore into reviews '
                               '(changeid, username, timestamp, component) values '
                               '("%s", "%s", %s, "%s");'
                               % (d['id'],
                                  review['by'].get('username', 'unknown'),
                                  sql.FormatSqlValue('timestamp', updated_at),
                                  component))
                cursor.execute('commit;')
def GetUpdates(eventname, showusers, project, last_time):
    """Poll for new per-user summary rows and stream them to the client.

    Loops forever: once a minute, re-queries the <eventname>summary table
    for each user in showusers and emits an 'update-value' packet for
    every row written after last_time.  last_time advances as rows are
    seen, so each update is only sent once.
    """
    while True:
        time.sleep(60)

        # Rebuild the DB connection in case the DB went away
        cursor = GetCursor()
        SendKeepAlive()
        SendDebug('Querying for updates after %d, server time %s'
                  % (last_time, datetime.datetime.now()))

        for username in showusers:
            # Only scan the last five days of rows to keep the query cheap
            ts = datetime.datetime.now()
            ts -= datetime.timedelta(days=5)

            # Bug fix: the username placeholder had been mangled to
            # "******", leaving three placeholders for four arguments
            # (a guaranteed TypeError); restored to %s.
            cursor.execute('select * from %ssummary where username="%s" '
                           'and epoch > %d and day > date(%s);'
                           % (eventname, username, last_time,
                              sql.FormatSqlValue('timestamp', ts)))
            for row in cursor:
                SendPacket({
                    'type': 'update-value',
                    'user': username,
                    'written-at': row['epoch'],
                    'day': row['day'].isoformat(),
                    'payload': json.loads(row['data']).get(project, 0)
                })
                if row['epoch'] > last_time:
                    last_time = row['epoch']
def handleurl_unmerge(self, urlfile):
    """Undo a merge.

    URL format: /<x>/<y>/<comma separated track ids>.  The first id is
    the surviving track, the rest are the tracks that were deleted by
    the merge.  State is restored from the audit rows the merge wrote
    to the events table.
    NOTE(review): eval() of database contents -- only acceptable because
    the events table is written by this application itself.
    """
    (_, _, tracks) = urlfile.split('/')
    tracks = tracks.replace(' ', '').replace('%20', '').split(',')
    self.log('Unmerging %s' % repr(tracks))

    # Restore the surviving track to its pre-merge state
    first_track = track.Track(db)
    first_track.persistant = eval(
        db.GetOneRow('select * from events '
                     'where '
                     'event = "merge: before" '
                     'and '
                     'track_id = %s order by '
                     'timestamp asc limit 1;' % tracks[0])['details'])
    self.log('Previous state of %s: %s'
             % (tracks[0], repr(first_track.persistant)))
    first_track.Store()

    # Recreate each deleted track and point its tags and paths back at it
    for t_id in tracks[1:]:
        t = track.Track(db)
        t.persistant = eval(
            db.GetOneRow('select * from events where '
                         'event = "merge: deleted" and '
                         'track_id = %s order by '
                         'timestamp desc limit 1;' % t_id)['details'])
        tags = eval(
            db.GetOneRow('select * from events where '
                         'event = "merge: tags: %s" '
                         'order by timestamp desc limit 1;' % t_id)['details'])
        paths = eval(
            db.GetOneRow('select * from events where '
                         'event = "merge: paths: %s" '
                         'order by timestamp desc limit 1;' % t_id)['details'])
        self.log('Previous state of %s: %s. Tags %s, Paths %s.'
                 % (t_id, repr(t.persistant), tags, paths))
        t.Store()

        for tag in tags:
            try:
                db.ExecuteSql(
                    'update tags set track_id=%s where tag="%s";'
                    % (t_id, tag['tag']))
            except:
                # The tag may already exist against the surviving track
                pass
        for path in paths:
            try:
                db.ExecuteSql(
                    'update paths set track_id=%s where path=%s;'
                    % (t_id, sql.FormatSqlValue('path', path['path'])))
            except:
                pass

    db.ExecuteSql('commit;')
def handleurl_art(self, urlfile):
    """Serve an image for a given album, if we have one."""
    # URL format: /<x>/<y>/<artist>/<album>, with %20 escaped spaces
    (_, _, artist, album) = urlfile.replace('%20', ' ').split('/')
    self.log('Fetching art for "%s" "%s"' % (artist, album))

    art_row = db.GetOneRow('select art from art where artist=%s and '
                           'album=%s;'
                           % (sql.FormatSqlValue('artist', artist),
                              sql.FormatSqlValue('album', album)))

    # Bail out early if nothing is stored for this album
    if not art_row or not art_row['art']:
        self.senderror(404, 'No such art')
        return

    # The art is stored base64 encoded in the database
    image = base64.decodestring(art_row['art'])
    headers = ('HTTP/1.1 200 OK\r\n'
               'Content-Type: image/jpeg\r\n'
               'Content-Length: %s\r\n'
               '%s\r\n'
               % (len(image), '\r\n'.join(self.extra_headers)))
    self.sendheaders(headers)
    self.buffer += image
def Execute(last_change, cmd_name, cmd, arg, cleanup=False):
    """Run a shell command, caching its output in the database.

    Returns the command's stdout split into lines.  A cached result is
    reused when it was written after last_change (a unix epoch).  With
    cleanup=True, cache entries older than two weeks are also expired.
    """
    actual_cmd = cmd % arg
    print 'Executing: %s' % actual_cmd
    cursor = feedutils.GetCursor()

    # Create the per-command cache table if needed; a failure here almost
    # certainly means it already exists, so it is deliberately ignored
    try:
        cursor.execute('create table commands_%s (arg varchar(500), '
                       'timestamp datetime, epoch int, result longtext, '
                       'primary key(arg, timestamp), index(timestamp), '
                       'index(epoch)) '
                       'engine=innodb;' % cmd_name)
    except:
        pass

    # Drop cache entries that predate the caller's freshness horizon
    cursor.execute('delete from commands_%s where arg="%s" and epoch < %s;'
                   % (cmd_name, arg, last_change))
    print ' ... Cleaned up %d old cache entries' % cursor.rowcount
    cursor.execute('commit;')

    # Use the newest remaining cache entry if it is fresh enough
    cursor.execute('select timestamp, epoch, result from commands_%s '
                   'where arg="%s" order by timestamp desc limit 1;'
                   % (cmd_name, arg))
    if cursor.rowcount > 0:
        row = cursor.fetchone()
        if row['epoch'] > last_change:
            print ' ... Using cached result'
            # Results are stored base64 encoded to survive SQL quoting
            return base64.decodestring(row['result']).split('\n')

    # Cache miss: actually run the command and store its output
    p = subprocess.Popen(actual_cmd, shell=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out = p.stdout.read()
    print ' ... Got %d bytes' % len(out)
    cursor.execute('insert into commands_%s (arg, timestamp, epoch, result) '
                   'values ("%s", now(), %d, "%s");'
                   % (cmd_name, arg, int(time.time()),
                      base64.encodestring(out)))
    cursor.execute('commit;')

    if cleanup:
        # Expire anything older than two weeks
        too_old = datetime.datetime.now()
        too_old -= datetime.timedelta(days=14)
        cursor.execute('delete from commands_%s where timestamp < %s;'
                       % (cmd_name,
                          sql.FormatSqlValue('timestamp', too_old)))
        cursor.execute('commit;')

    return out.split('\n')
def SendUsers(cursor, window_size, table, project=None):
    """Send a 'users-all' packet listing users active in the window.

    Each payload entry is (username, isoformat day of latest activity).
    When project is given, only rows whose data blob mentions it count.
    """
    # Window of window_size days ending now
    cutoff = datetime.datetime.now() - datetime.timedelta(days=1) * window_size

    # SQL LIKE pattern: everything, or anything mentioning the project
    if project:
        plike = '%%%s%%' % project
    else:
        plike = '%'

    cursor.execute('select distinct(username), max(day) from %s '
                   'where data like "%s" and day > date(%s) group by username;'
                   % (table, plike, sql.FormatSqlValue('timestamp', cutoff)))
    all_reviewers = [(row['username'], row['max(day)'].isoformat())
                     for row in cursor]
    SendPacket({'type': 'users-all', 'payload': all_reviewers})
def GetInitial(eventname, showusers, project, initial_size):
    """Send initial per-day values for each user; return the newest epoch.

    Emits one 'initial-value' packet per user per day for the last
    initial_size days (zero payload for days with no data), followed by
    an 'initial-value-ends' marker.  Returns the largest row epoch seen,
    for use as the starting point of the update poll loop.
    """
    cursor = GetCursor()

    # Fetch the last seven days of results to start off with
    last_time = 0
    one_day = datetime.timedelta(days=1)

    SendGroups(cursor)
    SendUsers(cursor, initial_size, '%ssummary' % eventname, project=project)
    SendPacket({'type': 'users-present', 'payload': showusers})

    for username in showusers:
        # Start at midnight, initial_size - 1 days ago
        day = datetime.datetime.now()
        day = datetime.datetime(day.year, day.month, day.day)
        day -= one_day * (initial_size - 1)

        for i in range(initial_size):
            timestamp = sql.FormatSqlValue('timestamp', day)
            # Bug fix: the username placeholder had been mangled to
            # "******", leaving two placeholders for three arguments
            # (a guaranteed TypeError); restored to %s.
            cursor.execute('select * from %ssummary where username="%s" '
                           'and day=date(%s);'
                           % (eventname, username, timestamp))
            packet = {
                'type': 'initial-value',
                'user': username,
                'day': day.isoformat()
            }
            if cursor.rowcount > 0:
                row = cursor.fetchone()
                packet['payload'] = json.loads(row['data']).get(project, 0)
                packet['written-at'] = row['epoch']
                if row['epoch'] > last_time:
                    last_time = row['epoch']
            else:
                # No activity recorded for this user on this day
                packet['payload'] = 0
            SendPacket(packet)
            day += one_day

    SendPacket({'type': 'initial-value-ends'})
    return last_time
def playgraph(self):
    """Generate a Google chart API graph of recent play history."""
    played_buckets = {}
    skipped_buckets = {}
    now = datetime.datetime.now()
    one_hour_ago = now - datetime.timedelta(minutes=60)

    # Collect data from MySQL and bucket events by age in minutes
    rows = db.GetRows('select song, plays, skips, last_action, '
                      'last_played, last_skipped from tracks '
                      'where last_action is not null and '
                      'last_action > %s '
                      'order by last_action desc;'
                      % sql.FormatSqlValue('date', one_hour_ago))
    for row in rows:
        for column, buckets in (('last_played', played_buckets),
                                ('last_skipped', skipped_buckets)):
            when = row[column]
            if when:
                age = (now - when).seconds / 60
                if age < 3600:
                    buckets[age] = buckets.get(age, 0) + 1

    # One comma separated data point per minute for the last hour
    play_series = ','.join('%d' % played_buckets.get(m, 0)
                           for m in range(60))
    skip_series = ','.join('%d' % skipped_buckets.get(m, 0)
                           for m in range(60))
    return ('cht=bvg&'
            'chbh=a&chds=0,10,0,10&chd=t:%s|%s&'
            'chco=00FF00,FF0000' % (play_series, skip_series))
def RebuildSummary(): users_cursor = feedutils.GetCursor() dates_cursor = feedutils.GetCursor() close_cursor = feedutils.GetCursor() users_cursor.execute('select distinct(username) from bugclose;') for user_row in users_cursor: print user_row['username'] dates_cursor.execute('select distinct(date(timestamp)) ' 'from bugclose where username = "******";' % user_row['username']) for date_row in dates_cursor: print ' %s' % date_row['(date(timestamp))'] timestamp = sql.FormatSqlValue('timestamp', date_row['(date(timestamp))']) summary = {'__total__': 0} close_cursor.execute('select * from bugclose where ' 'username = "******" and date(timestamp) = %s ' 'order by timestamp asc;' % (user_row['username'], timestamp)) for close in close_cursor: summary.setdefault(close['component'], 0) summary[close['component']] += 1 summary['__total__'] += 1 epoch = time.mktime(close['timestamp'].timetuple()) close_cursor.execute('delete from bugclosesummary where ' 'username="******" and day=date(%s);' % (user_row['username'], timestamp)) close_cursor.execute( 'insert into bugclosesummary' '(day, username, data, epoch) ' 'values (date(%s), "%s", \'%s\', %d);' % (timestamp, user_row['username'], json.dumps(summary), epoch)) close_cursor.execute('commit;')
def CreateRss(db, component, status):
    """Write an RSS feed of code reviews for a component and status.

    Emits <component>_<status>.rss in the current directory and records
    each parsed change in the changes table as it goes.
    """
    filename = '%s_%s.rss' % (component, status)
    f = open(filename, 'w')
    # Static channel header
    f.write("""<?xml version="1.0" ?>
<rss version="2.0">
<channel>
<title>%s code reviews (%s)</title>
<link>https://review.openstack.org/</link>
<description>openstack/nova code reviews</description>
<language>en</language>
<generator>stillhq.com</generator>
<ttl>180</ttl>
<docs>http://blogs.law.harvard.edu/tech/rss</docs>
""" % (component, status))

    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    for change in ParseReviewList(db, component, status):
        # Attempt to parse the time in the review
        # 2012-05-13 21:29:57 UTC
        # NOTE(review): [0:5] keeps year..minute, discarding seconds
        updated_at = datetime.datetime(
            *time.strptime(change['lastUpdated'],
                           '%Y-%m-%d %H:%M:%S %Z')[0:5])
        from_zone = tz.tzutc()
        to_zone = tz.tzlocal()
        utc_updated_at = updated_at.replace(tzinfo=from_zone)
        local_updated_at = utc_updated_at.astimezone(to_zone)
        print(' ... Time %s converted to %s UTC and %s local'
              % (change['lastUpdated'], utc_updated_at, local_updated_at))

        # NOTE(review): this loop rebinds the rss `filename` variable;
        # harmless only because the output file is already open
        files = []
        for filename in GetFileList(db,
                                    time.mktime(
                                        local_updated_at.timetuple()),
                                    change['change']):
            files.append(filename)
        change['files'] = '\n<li>'.join(files)

        # Save to the DB
        cursor.execute(
            'insert ignore into changes (changeid, timestamp, parsed) '
            'values("%s", %s, "%s");'
            % (change['change'],
               sql.FormatSqlValue('timestamp', local_updated_at),
               base64.encodestring(repr(change))))
        cursor.execute('commit;')

        # One RSS item per change, filled from the change dictionary
        f.write("""
<item>
<title>%(component)s: %(subject)s (%(status)s)</title>
<pubDate>%(lastUpdated)s</pubDate>
<description>
%(description)s
<br/><br/>
<b>Files:</b><br/>
<li>%(files)s
</description>
<link>%(url)s</link>
<guid isPermaLink="true">%(url)s</guid>
</item>
""" % change)

    f.write("""</channel>
</rss>
""")
    f.close()
print '%s Added %s for user %s' % ( datetime.datetime.now(), id_row['id'], row['user']) ids = ids[100:] # Work out play lengths for row in db.GetRows('select path from paths where error is null ' 'and duration is null limit 1;'): try: duration = mad.MadFile(row['path']).total_time() print '%s MP3 length %s: %f ms' % (datetime.datetime.now(), row['path'], duration) db.ExecuteSql( 'update paths set duration=%f where path=%s;' % (duration, sql.FormatSqlValue('path', row['path']))) except Exception, e: try: db.ExecuteSql( 'update paths set error=%s where path=%s;' % (sql.FormatSqlValue('error', str(e)), sql.FormatSqlValue('path', row['path']))) except: pass #for row in db.GetRows('select distinct tracks.artist, tracks.album, ' # 'art.art from tracks left join art on ' # 'tracks.artist = art.artist and ' # 'tracks.album = art.album where ' # 'art.art is null and art.error is null and ' # 'tracks.artist is not null '
STATES.setdefault(component, {}) STATES[component].setdefault(importance, {}) STATES[component][importance].setdefault(day, []) STATES[component][importance][day].append(str(id)) STATES[component].setdefault('__total__', {}) STATES[component]['__total__'].setdefault(day, []) STATES[component]['__total__'][day].append(str(id)) day += datetime.timedelta(days=1) if __name__ == '__main__': FindDuplicates() RebuildStates() cursor = feedutils.GetCursor() cursor.execute('delete from bugcounts;') for component in STATES: for importance in STATES[component]: for day in STATES[component][importance]: timestamp = sql.FormatSqlValue('timestamp', day) STATES[component][importance][day].sort() cursor.execute('insert ignore into bugcounts ' '(component, importance, day, count, ids) values ' '("%s", "%s", %s, %d, "%s");' %(component, importance, timestamp, len(STATES[component][importance][day]), ' '.join(STATES[component][importance][day]))) cursor.execute('commit;')
def FromPath(self, path, attemptparse=True): """Rehydrate a track from its path.""" new_track = False updated = False id_row = self.db.GetOneRow( 'select track_id from paths where path = "%s";' % path) if id_row: id = id_row['track_id'] self.persistant = self.db.GetOneRow( 'select * from tracks where id=%d;' % id) if not self.persistant: raise Exception('No persistant data for track id: %s' % id) # There was a bug a while ago where we left underscores in the database. # Clean that up... for key in 'artist', 'album', 'song': if not key in self.persistant: raise Exception( 'Required key %s missing for track path %s' % (key, path)) before = self.persistant[key] after = self.persistant[key].replace('_', ' ') if before != after: self.persistant[key] = after updated = True else: # This is a new track self.persistant = {} self.persistant['creation_time'] = datetime.datetime.now() new_track = True # Attempt to parse the path using Mikal's file naming scheme. 30 chars is # is the field length for ID3, and might indicate the tags have been # truncated if attemptparse: if (not 'artist' in self.persistant or len(self.persistant.get('song', ' ')) == 30 or len(self.persistant.get('album', ' ')) == 30): m = _PATH_PARSE_RE.match(path) if m: try: self.persistant['artist'] = m.group(1).replace( '_', ' ') self.persistant['album'] = m.group(2).replace('_', ' ') self.persistant['song'] = m.group(4).replace('_', ' ') self.persistant['number'] = int(m.group(3)).replace( '_', ' ') updated = True except: pass # Use ID3 if we're missing information. This isn't as good as file parsing # as the ID3 fields have maximum lengths we sometimes hit. 
if not self.persistant.has_key('artist'): try: id3r = id3reader.Reader(path) if id3r.getValue('album'): self.persistant['album'] = id3r.getValue('album') updated = True if id3r.getValue('performer'): self.persistant['artist'] = id3r.getValue('performer') updated = True if id3r.getValue('title'): self.persistant['song'] = id3r.getValue('title') updated = True if id3r.getValue('track'): try: self.persistant['number'] = int(id3r.getValue('track')) updated = True except: pass except: pass # Perhaps we have this MP3 under another path if not self.persistant.has_key('artist'): try: self.FromMeta(self.persistant['artist'], self.persistant['album'], self.persistant['number'], self.persistant['song']) updated = True except: pass if new_track: # Now write this to the database to get an ID self.db.ExecuteSql( 'insert into tracks(artist, album, song, number) ' 'values(%s, %s, %s, %d);' % (sql.FormatSqlValue('artist', self.persistant['artist']), sql.FormatSqlValue('album', self.persistant['album']), sql.FormatSqlValue('song', self.persistant['song']), self.persistant.get('number', 0))) id = self.db.GetOneRow( 'select last_insert_id();')['last_insert_id()'] self.persistant['id'] = id if updated: print 'Updated track %s' % self.persistant['id'] self.Store()
def Reviews(component): cursor = feedutils.GetCursor() for l in dbcachingexecute.Execute(time.time() - 60, 'gerrit_query_approvals_json', ('ssh -i ~/.ssh/id_gerrit ' 'review.openstack.org gerrit query ' 'project:%s ' '--all-approvals --patch-sets ' '--format JSON'), component, cleanup=True): try: d = json.loads(l) except: continue if d.has_key('id'): b64 = base64.encodestring(l) checksum = hashlib.sha1(l).hexdigest() last_updated = datetime.datetime.fromtimestamp(d['lastUpdated']) timestamp = sql.FormatSqlValue('timestamp', last_updated) insert = ( 'insert ignore into changes (changeid, timestamp, parsed, ' 'checksum) values ("%s", %s, "%s", "%s");' % (d['id'], timestamp, b64, checksum)) cursor.execute(insert) if cursor.rowcount == 0: cursor.execute('select * from changes where changeid="%s";' % d['id']) stored_checksum = cursor.fetchone()['checksum'] if checksum != stored_checksum: cursor.execute('delete from changes where changeid="%s";' % d['id']) cursor.execute(insert) cursor.execute('commit;') for ps in d.get('patchSets', {}): patchset = ps.get('number') for review in ps.get('approvals', []): # Deliberately leave the timezone alone here so its consistant # with reports others generate. 
updated_at = datetime.datetime.fromtimestamp( review['grantedOn']) username = review['by'].get('username', 'unknown') if username in ['jenkins', 'smokestack']: continue timestamp = sql.FormatSqlValue('timestamp', updated_at) score = review.get('value', 0) cursor.execute( 'insert ignore into reviews ' '(changeid, username, timestamp, day, component, ' 'patchset, score) ' 'values ("%s", "%s", %s, date(%s), "%s", %s, %s);' % (d['id'], username, timestamp, timestamp, component, patchset, score)) if cursor.rowcount > 0: # This is a new review, we assume we're the only writer print 'New review from %s' % username cursor.execute('select * from reviewsummary where ' 'username="******" and day=date(%s);' % (username, timestamp)) if cursor.rowcount > 0: row = cursor.fetchone() summary = json.loads(row['data']) else: summary = {} summary.setdefault(component, 0) summary.setdefault('__total__', 0) summary[component] += 1 summary['__total__'] += 1 cursor.execute('delete from reviewsummary where ' 'username="******" and day=date(%s);' % (username, timestamp)) cursor.execute('insert into reviewsummary' '(day, username, data, epoch) ' 'values (date(%s), "%s", \'%s\', %d);' % (timestamp, username, json.dumps(summary), int(time.time()))) cursor.execute('commit;')
def Merge(self, other):
    """Merge another track with this one.

    Folds other's counters, dates, metadata, tags and paths into this
    track.  Before/after states are logged to the events table so the
    merge can be undone later (see handleurl_unmerge).
    """
    # Logging just in case
    self.db.ExecuteSql(
        'insert into events(timestamp, track_id, event, '
        'details) values (now(), %d, "merge: before", %s);'
        % (self.persistant['id'],
           sql.FormatSqlValue('details', repr(self.persistant))))
    self.db.ExecuteSql(
        'insert into events(timestamp, track_id, event, '
        'details) values (now(), %d, "merge: deleted", %s);'
        % (other.persistant['id'],
           sql.FormatSqlValue('details', repr(other.persistant))))

    # Fields which can be summed
    for f in ['plays', 'skips']:
        self.persistant[f] = (self.persistant.get(f, 0) +
                              other.persistant.get(f, 0))

    # Date fields where we take the newest.  1 Jan 1970 acts as the
    # "missing" sentinel: it loses every comparison, and a sentinel
    # result means neither side had a value so nothing is written.
    for f in ['last_played', 'last_skipped', 'last_action']:
        a = self.persistant.get(f, datetime.datetime(1970, 1, 1))
        b = other.persistant.get(f, datetime.datetime(1970, 1, 1))
        if a > b:
            v = a
        else:
            v = b
        if v != datetime.datetime(1970, 1, 1):
            self.persistant[f] = v

    # Date fields where we take the oldest.  Note the sentinel wins the
    # "oldest" comparison, so the merge only happens when both sides
    # actually have a value.
    for f in ['creation_time']:
        a = self.persistant.get(f, datetime.datetime(1970, 1, 1))
        b = other.persistant.get(f, datetime.datetime(1970, 1, 1))
        if a < b:
            v = a
        else:
            v = b
        if v != datetime.datetime(1970, 1, 1):
            self.persistant[f] = v

    # Fields where we only clobber ours if we don't have a value
    for f in ['artist', 'album', 'song']:
        if not self.persistant.has_key(f) or not self.persistant[f]:
            self.persistant[f] = other.persistant.get(f, None)

    # Sometimes the number is a placeholder
    if self.persistant.has_key(
            'number') and self.persistant['number'] == -1:
        self.persistant['number'] = other.persistant.get('number', -1)
    if not self.persistant.has_key('number'):
        self.persistant['number'] = other.persistant.get('number', -1)

    # Update the id in the tags table, logging the old tag rows first so
    # an unmerge can restore them
    tags = self.db.GetRows('select tag from tags where track_id=%d;'
                           % other.persistant['id'])
    self.db.ExecuteSql(
        'insert into events(timestamp, track_id, event, '
        'details) values (now(), %d, "merge: tags: %d", %s);'
        % (self.persistant['id'],
           other.persistant['id'],
           sql.FormatSqlValue('details', repr(tags))))
    try:
        self.db.ExecuteSql(
            'update tags set track_id=%d where track_id=%d;'
            % (self.persistant['id'], other.persistant['id']))
        self.db.ExecuteSql('commit;')
    except:
        # This can happen if there is already a matching tag for the
        # first track
        pass

    # Update the id in the paths table
    paths = self.db.GetRows('select path from paths where track_id=%d;'
                            % other.persistant['id'])
    self.db.ExecuteSql(
        'insert into events(timestamp, track_id, event, '
        'details) values (now(), %d, "merge: paths: %d", %s);'
        % (self.persistant['id'],
           other.persistant['id'],
           sql.FormatSqlValue('details', repr(paths))))
    self.db.ExecuteSql('update paths set track_id=%d where track_id=%d;'
                       % (self.persistant['id'], other.persistant['id']))
    self.db.ExecuteSql('commit;')

    self.db.ExecuteSql(
        'insert into events(timestamp, track_id, event, '
        'details) values (now(), %d, "merge: after", %s);'
        % (self.persistant['id'],
           sql.FormatSqlValue('details', repr(self.persistant))))
    self.db.ExecuteSql('commit;')
def ScrapeProject(projectname, days):
    """Scrape recently modified Launchpad bugs for a project into the DB.

    Walks every bug task modified in the last `days` days, recording bug
    metadata and activity events, and attributing triage / close /
    progress actions to users.  Database writes only happen when the
    module-level WRITE flag is set; progress printing always happens.
    """
    launchpad = Launchpad.login_with('openstack-lp-scripts', 'production',
                                     CACHEDIR)
    proj = launchpad.projects[projectname]
    cursor = feedutils.GetCursor()
    subcursor = feedutils.GetCursor()

    # Scan window starts at midnight `days` days ago
    now = datetime.datetime.now()
    since = datetime.datetime(now.year, now.month, now.day)
    since -= datetime.timedelta(days=days)

    bugs = proj.searchTasks(modified_since=since, status=[
        "New", "Incomplete", "Invalid", "Won't Fix", "Confirmed",
        "Triaged", "In Progress", "Fix Committed", "Fix Released"
    ])
    for b in bugs:
        # ONLY is a module-level debugging filter of bug ids
        if ONLY and b.bug.id not in ONLY:
            continue

        # Who touched status / importance, for triage attribution below
        status_toucher = None
        importance_toucher = None
        triage_timestamp = None

        print '\n%s' % b.title
        print 'Reported by: %s' % b.bug.owner.name
        if WRITE:
            cursor.execute(
                'insert ignore into bugs%s '
                '(id, title, reporter, timestamp, component) '
                'values(%s, %s, "%s", %s, "%s");'
                % (VERSION, b.bug.id,
                   sql.FormatSqlValue('title', b.bug.title),
                   b.bug.owner.name,
                   sql.FormatSqlValue('timestamp', b.bug.date_created),
                   projectname))
            cursor.execute('commit;')
            # Record which bug each duplicate points at
            for dup in getattr(b.bug, 'duplicates', []):
                print 'Duplicate: %s' % dup.id
                cursor.execute(
                    'update bugs%s set duplicateof=%s where id=%s;'
                    % (VERSION, b.bug.id, dup.id))
                cursor.execute('commit;')

        for bugtask in b.bug.bug_tasks:
            print(' Targetted to %s on %s by %s'
                  % (bugtask.bug_target_name, bugtask.date_created,
                     bugtask.owner.name))
            if WRITE:
                # Synthesize targetted / importance / status events for
                # the task creation itself
                timestamp = sql.FormatSqlValue('timestamp',
                                               bugtask.date_created)
                cursor.execute(
                    'insert ignore into bugevents%s '
                    '(id, component, timestamp, username, '
                    'field, pre, post) '
                    'values(%s, "%s", %s, "%s", "targetted", NULL, '
                    '"%s");' % (VERSION, b.bug.id,
                                bugtask.bug_target_name, timestamp,
                                bugtask.owner.name,
                                bugtask.bug_target_name))
                cursor.execute(
                    'insert ignore into bugevents%s '
                    '(id, component, timestamp, username, '
                    'field, pre, post) '
                    'values(%s, "%s", %s, "%s", "importance", "-", '
                    '"%s");' % (VERSION, b.bug.id,
                                bugtask.bug_target_name, timestamp,
                                bugtask.owner.name, bugtask.importance))
                cursor.execute('insert ignore into bugevents%s '
                               '(id, component, timestamp, username, '
                               'field, pre, post) '
                               'values(%s, "%s", %s, "%s", "status", "-", '
                               '"%s");'
                               % (VERSION, b.bug.id,
                                  bugtask.bug_target_name, timestamp,
                                  bugtask.owner.name, bugtask.status))
                cursor.execute('commit;')

            if bugtask.assignee:
                # Assignment within a minute of creation means the bug
                # was assigned at task creation time
                time_diff = bugtask.date_assigned - bugtask.date_created
                if time_diff.seconds < 60:
                    print(' *** special case: bug assigned to %s on task '
                          'creation ***' % bugtask.assignee)
                    if WRITE:
                        timestamp = sql.FormatSqlValue(
                            'timestamp', bugtask.date_assigned)
                        cursor.execute(
                            'insert ignore into bugevents%s '
                            '(id, component, timestamp, username, '
                            'field, pre, post) '
                            'values(%s, "%s", %s, "%s", "assignee", "-", '
                            '"%s");' % (VERSION, b.bug.id,
                                        bugtask.bug_target_name,
                                        timestamp, bugtask.owner.name,
                                        bugtask.assignee.name))
                        cursor.execute('commit;')

        for activity in b.bug.activity:
            # Only activity entries scoped to this project
            if activity.whatchanged.startswith('%s: ' % projectname):
                timestamp = sql.FormatSqlValue('timestamp',
                                               activity.datechanged)
                oldvalue = activity.oldvalue
                newvalue = activity.newvalue
                # Reduce "Real Name (login)" style values to the login
                try:
                    m = PERSON_RE.match(oldvalue)
                    if m:
                        oldvalue = m.group(1)
                except:
                    pass
                try:
                    m = PERSON_RE.match(newvalue)
                    if m:
                        newvalue = m.group(1)
                except:
                    pass
                print(' %s :: %s -> %s :: %s on %s'
                      % (activity.whatchanged, oldvalue, newvalue,
                         activity.person.name, activity.datechanged))
                if WRITE:
                    cursor.execute('insert ignore into bugevents%s '
                                   '(id, component, timestamp, username, '
                                   'field, pre, post) '
                                   'values(%s, "%s", %s, "%s", "%s", "%s", '
                                   '"%s");'
                                   % (VERSION, b.bug.id, projectname,
                                      timestamp, activity.person.name,
                                      activity.whatchanged.split(': ')[1],
                                      oldvalue, newvalue))
                    cursor.execute('commit;')

                # We define a triage as changing the status from New, and
                # changing the importance from Undecided. You must do both
                # to earn a cookie.
                if (activity.whatchanged.endswith(' status')
                        and (activity.oldvalue in ['New', 'Incomplete'])
                        and (activity.newvalue in ['Confirmed',
                                                   'Triaged'])):
                    status_toucher = activity.person.name
                    if (not triage_timestamp
                            or activity.datechanged > triage_timestamp):
                        triage_timestamp = activity.datechanged

                if (activity.whatchanged.endswith(' importance')
                        and (activity.oldvalue == 'Undecided')):
                    importance_toucher = activity.person.name
                    if (not triage_timestamp
                            or activity.datechanged > triage_timestamp):
                        triage_timestamp = activity.datechanged

                # Marking a bug as invalid is a special super cookie!
                if (activity.whatchanged.endswith(' status')
                        and (activity.newvalue in ['Invalid', 'Opinion'])):
                    status_toucher = activity.person.name
                    importance_toucher = activity.person.name
                    triage_timestamp = activity.datechanged

                # Code review sent for a bug
                if (activity.whatchanged.endswith(' status')
                        and (activity.newvalue in ['In Progress'])):
                    # Find out who the bug was assigned to
                    cursor.execute('select * from bugevents%s where '
                                   'id=%s and component="%s" and '
                                   'field="assignee" and '
                                   'timestamp < %s '
                                   'order by timestamp desc limit 1;'
                                   % (VERSION, b.bug.id, projectname,
                                      timestamp))
                    for row in cursor:
                        user = row['post']
                        if WRITE:
                            UpdateTrackingTables('progress', b,
                                                 projectname, timestamp,
                                                 user)
                            subcursor.execute('commit;')

                # A bug was marked as fixed
                if (activity.whatchanged.endswith(' status')
                        and (activity.newvalue in ['Fix Committed'])):
                    # Find out who the bug was assigned to
                    cursor.execute('select * from bugevents%s where '
                                   'id=%s and component="%s" and '
                                   'field="assignee" and '
                                   'timestamp < %s '
                                   'order by timestamp desc limit 1;'
                                   % (VERSION, b.bug.id, projectname,
                                      timestamp))
                    for row in cursor:
                        user = row['post']
                        if WRITE:
                            subcursor.execute(
                                'update bugs%s set '
                                'closedby="%s" where id=%s '
                                'and component="%s";'
                                % (VERSION, user, b.bug.id, projectname))
                            subcursor.execute('commit;')
                            print ' *** %s closed this bug ***' % user
                            UpdateTrackingTables('close', b, projectname,
                                                 timestamp, user)
                            subcursor.execute('commit;')

                # A bug was unfixed
                if (activity.whatchanged.endswith(' status')
                        and (activity.oldvalue in ['Fix Committed'])
                        and (not activity.newvalue in ['Fix Released'])):
                    if WRITE:
                        cursor.execute('update bugs%s set closedby = null '
                                       'where id=%s and component="%s";'
                                       % (VERSION, b.bug.id, projectname))
                        cursor.execute('commit;')
                    print ' *** This bug was unclosed ***'

        # Only credit a triage when one person did both halves
        if (status_toucher and importance_toucher
                and (status_toucher == importance_toucher)):
            print ' *** %s triaged this bug ***' % status_toucher
            timestamp = sql.FormatSqlValue('timestamp', triage_timestamp)
            if WRITE:
                UpdateTrackingTables('triage', b, projectname, timestamp,
                                     status_toucher)
form['reviewers'].value) # Fetch the last seven days of results to start off with initial_size = 30 one_day = datetime.timedelta(days=1) last_timestamp = datetime.datetime(1970, 1, 1) feedutils.SendGroups(cursor) feedutils.SendReviewers(cursor, '__total__', initial_size) feedutils.SendPacket({'type': 'users-present', 'payload': showusers}) for username in showusers: day = datetime.datetime.now() day = datetime.datetime(day.year, day.month, day.day) day -= one_day * (initial_size - 1) timestamp = sql.FormatSqlValue('timestamp', day) cursor.execute('select *, date(timestamp) from reviews where ' 'username="******" and timestamp > %s ' 'order by timestamp asc;' % (username, timestamp)) for row in cursor: packet = { 'type': 'initial-value', 'user': username, 'day': row['timestamp'].isoformat(), 'payload': row['score'] } feedutils.SendPacket(packet) if row['timestamp'] > last_timestamp: last_timestamp = row['timestamp']
def Collect(cursor):
    """Pull netflow usage from the remote flows DB into local sensors.

    Walks backwards a minute at a time over the last 24 hours, reading
    five-minute windows of per-IP byte counts from the remote flows
    table and writing per-IP and per-owner rows to the local sensors
    table.  Owners are resolved via reverse DNS plus a TXT record and
    cached for the duration of the run.
    """
    DNS.DiscoverNameServers()
    user_cache = {}

    remote_db = MySQLdb.connect(user=AUTH['username'],
                                db=AUTH['database'],
                                passwd=AUTH['password'],
                                host=AUTH['hostname'])
    remote_cursor = remote_db.cursor(MySQLdb.cursors.DictCursor)

    now = datetime.datetime.now()
    one_minute = datetime.timedelta(minutes=1)
    five_minutes = datetime.timedelta(minutes=5)

    # We need to be five minutes behind so the summary db can keep up
    now -= five_minutes

    for i in range(24 * 60):
        # Internal flows aren't logged in the db, so we ignore zii here to
        # avoid double counting flows originating from zii
        statement = (
            'select internalip, sum(bytes) from flows_%04d%02d where '
            'time >= %s and time < %s and node="zii" group by internalip;'
            % (now.year, now.month,
               sql.FormatSqlValue('date', now - five_minutes),
               sql.FormatSqlValue('date', now)))
        remote_cursor.execute(statement)

        usage = {}
        epoch = time.mktime((now - five_minutes).timetuple())
        for row in remote_cursor:
            ip = row['internalip']
            print '%s %s %s' % (now - five_minutes, ip, row['sum(bytes)'])
            name = 'Netflow'
            if ip == '192.168.1.20':
                name = 'Gateway Netflow'
            cursor.execute('insert ignore into sensors '
                           '(epoch_seconds, sensor, value, hostname) '
                           'values(%s, "%s", "%s", "%s");'
                           % (epoch, name, row['sum(bytes)'], ip))
            cursor.execute('commit;')

            # Determine the "user" for this IP: reverse DNS to a
            # hostname, then a TXT record on that hostname names the
            # owner.  Lookups are cached per run.
            if ip not in user_cache:
                try:
                    ip_rev = ip.split('.')
                    ip_rev.reverse()
                    arpa = '%s.in-addr.arpa' % '.'.join(ip_rev)
                    hostname = DNS.dnslookup(arpa, qtype='PTR')[0]
                    owner = DNS.dnslookup(hostname, qtype='TXT')[0][0]
                    print('%s Looking up the owner of %s gave %s'
                          % (datetime.datetime.now(), ip, owner))
                except Exception, e:
                    print 'Owner lookup error for %s: %s' % (ip, e)
                    owner = 'Unknown'
                user_cache[ip] = owner

            print '%s Owner of this IP is %s' % (datetime.datetime.now(),
                                                 user_cache[ip])
            usage.setdefault(user_cache[ip], 0)
            usage[user_cache[ip]] += row['sum(bytes)']

        # Aggregate per-owner totals for the same window.  NOTE(review):
        # the value column receives the byte count and the hostname
        # column receives the owner name here.
        for owner in usage:
            cursor.execute('insert ignore into sensors '
                           '(epoch_seconds, sensor, value, hostname) '
                           'values(%s, "User Netflow", "%s", "%s");'
                           % (epoch, usage[owner], owner))
            cursor.execute('commit;')

        now -= one_minute