def main():
    """Tabulate recent emails by affiliation and upload the CSV to the target node.

    Loads the destination node and acting user from settings, gathers emails
    newer than the configured time delta, renders them as CSV in memory, and
    hands the buffer to ``utils.send_file`` for upload.
    """
    target_node = models.Node.load(settings.TABULATE_EMAILS_NODE_ID)
    acting_user = models.User.load(settings.TABULATE_EMAILS_USER_ID)
    recent_emails = get_emails_since(settings.TABULATE_EMAILS_TIME_DELTA)
    # Build the CSV entirely in memory; send_file consumes the buffer.
    buffer = StringIO()
    utils.make_csv(buffer, recent_emails, ['affiliation', 'count'])
    utils.send_file(
        settings.TABULATE_EMAILS_FILE_NAME,
        settings.TABULATE_EMAILS_CONTENT_TYPE,
        buffer,
        target_node,
        acting_user,
    )
def main():
    """Tabulate recent emails by affiliation and attach the CSV to the configured node.

    Uses module-level NODE_ID / USER_ID / TIME_DELTA constants, writes the
    CSV to an in-memory buffer, and uploads it via ``utils.send_file``.
    """
    target_node = models.Node.load(NODE_ID)
    target_user = models.User.load(USER_ID)
    recent_emails = get_emails_since(TIME_DELTA)
    # In-memory CSV buffer handed off to send_file for upload.
    output = StringIO()
    utils.make_csv(output, recent_emails, ['affiliation', 'count'])
    utils.send_file(app, FILE_NAME, CONTENT_TYPE, output, target_node, target_user)
def write_counts(counts, outname):
    """Write per-user usage totals to a CSV file, sorted by total descending.

    :param counts: mapping of user -> {category: count}; each user object is
        expected to expose ``_id`` and ``fullname`` attributes
    :param outname: path of the CSV file to create (overwritten if present)
    """
    # Use `.items()` rather than the Python-2-only `.iteritems()`: behavior is
    # identical here and the function now runs under both Python 2 and 3.
    summed_counts = sorted(
        ((user, sum(values.values())) for user, values in counts.items()),
        key=lambda item: item[1],
        reverse=True,
    )
    with open(outname, 'w') as fp:
        utils.make_csv(
            fp,
            [
                [user._id, user.fullname, total]
                for user, total in summed_counts
            ],
            ['user-id', 'user-name', 'usage'],
        )
def main():
    """Run the log map-reduce since the cutoff and upload the counts as CSV.

    Computes a UTC cutoff from TIME_OFFSET, runs the map-reduce over entries
    newer than the cutoff, streams (name, count) rows sorted by count
    descending into a CSV buffer, and uploads it via ``utils.send_file``.
    """
    target_node = models.Node.load(NODE_ID)
    target_user = models.User.load(USER_ID)
    cutoff = datetime.datetime.utcnow() - TIME_OFFSET
    reduced = run_map_reduce(query={'date': {'$gt': cutoff}})
    # Sort results by aggregated value, largest first.
    sorted_rows = reduced.find().sort([('value', pymongo.DESCENDING)])
    output = StringIO()
    utils.make_csv(
        output,
        ((entry['_id'], entry['value']) for entry in sorted_rows),
        ['name', 'count'],
    )
    utils.send_file(app, FILE_NAME, CONTENT_TYPE, output, target_node, target_user)
def main():
    """Tabulate log counts since the configured cutoff and upload them as CSV.

    Settings-driven variant: node, user, time offset, file name, and content
    type all come from TABULATE_LOGS_* settings. Rows are emitted sorted by
    count descending.
    """
    target_node = models.Node.load(settings.TABULATE_LOGS_NODE_ID)
    target_user = models.User.load(settings.TABULATE_LOGS_USER_ID)
    cutoff = datetime.datetime.utcnow() - settings.TABULATE_LOGS_TIME_OFFSET
    reduced = run_map_reduce(query={'date': {'$gt': cutoff}})
    # Highest counts first in the output CSV.
    sorted_rows = reduced.find().sort([('value', pymongo.DESCENDING)])
    output = StringIO()
    utils.make_csv(
        output,
        ((entry['_id'], entry['value']) for entry in sorted_rows),
        ['name', 'count'],
    )
    utils.send_file(
        app,
        settings.TABULATE_LOGS_FILE_NAME,
        settings.TABULATE_LOGS_CONTENT_TYPE,
        output,
        target_node,
        target_user,
    )
def main():
    """Tabulate log counts since the configured cutoff and store them as a file object.

    Same aggregation as the send_file variant, but persists the CSV through
    ``utils.create_object`` with ``kind='file'`` instead of uploading it.
    """
    target_node = models.Node.load(settings.TABULATE_LOGS_NODE_ID)
    target_user = models.User.load(settings.TABULATE_LOGS_USER_ID)
    cutoff = datetime.datetime.utcnow() - settings.TABULATE_LOGS_TIME_OFFSET
    reduced = run_map_reduce(query={'date': {'$gt': cutoff}})
    # Emit (name, count) pairs, highest count first.
    sorted_rows = reduced.find().sort([('value', pymongo.DESCENDING)])
    output = StringIO()
    utils.make_csv(
        output,
        ((entry['_id'], entry['value']) for entry in sorted_rows),
        ['name', 'count'],
    )
    utils.create_object(
        settings.TABULATE_LOGS_FILE_NAME,
        settings.TABULATE_LOGS_CONTENT_TYPE,
        target_node,
        target_user,
        stream=output,
        kind='file',
    )