def upload_analytics(local_path=None, remote_path='/'):
    """Recursively mirror a local analytics directory onto a remote node.

    Files are uploaded with a ``'file-update'`` action; subdirectories are
    created remotely with a ``'folder-update'`` action and then recursed
    into, using the remote path reported back in the response metadata.

    :param local_path: Local directory to upload. Falls back to
        ``website_settings.ANALYTICS_PATH`` when falsy.
    :param remote_path: Remote folder path to upload into; defaults to root.
    """
    node = models.Node.load(settings.TABULATE_LOGS_NODE_ID)
    user = models.User.load(settings.TABULATE_LOGS_USER_ID)
    if not local_path:
        local_path = website_settings.ANALYTICS_PATH
    for name in os.listdir(local_path):
        # Hoisted: the original recomputed this join up to three times
        # per iteration (isfile check, log message, open/recurse).
        full_path = os.path.join(local_path, name)
        if not os.path.isfile(full_path):
            logger.info('create directory: {}'.format(full_path))
            metadata = utils.create_object(name, 'folder-update', node, user,
                                           kind='folder', path=remote_path)
            # Recurse into the subdirectory, targeting the remote path the
            # API assigned to the folder we just created.
            upload_analytics(full_path, metadata['attributes']['path'])
        else:
            logger.info('update file: {}'.format(full_path))
            with open(full_path, 'rb') as fp:
                utils.create_object(name, 'file-update', node, user,
                                    stream=fp, kind='file', path=remote_path)
def main():
    """Tabulate recent user emails by affiliation and upload the CSV."""
    target_node = models.Node.load(settings.TABULATE_EMAILS_NODE_ID)
    uploader = models.User.load(settings.TABULATE_EMAILS_USER_ID)
    rows = get_emails_since(settings.TABULATE_EMAILS_TIME_DELTA)
    csv_buffer = StringIO()
    utils.make_csv(csv_buffer, rows, ['affiliation', 'count'])
    utils.create_object(
        settings.TABULATE_EMAILS_FILE_NAME,
        settings.TABULATE_EMAILS_CONTENT_TYPE,
        target_node,
        uploader,
        stream=csv_buffer,
        kind='file'
    )
def main():
    """Build a per-affiliation email-count CSV and store it on the node."""
    node = models.Node.load(settings.TABULATE_EMAILS_NODE_ID)
    user = models.User.load(settings.TABULATE_EMAILS_USER_ID)
    # Gather emails newer than the configured time delta.
    recent = get_emails_since(settings.TABULATE_EMAILS_TIME_DELTA)
    out = StringIO()
    utils.make_csv(out, recent, ['affiliation', 'count'])
    utils.create_object(
        settings.TABULATE_EMAILS_FILE_NAME,
        settings.TABULATE_EMAILS_CONTENT_TYPE,
        node,
        user,
        stream=out,
        kind='file',
    )
def upload_analytics(local_path=None, remote_path='/'):
    """Push a local analytics directory tree to a remote node.

    Regular files become file updates; anything else is treated as a
    directory, created remotely, and recursed into at the remote path
    returned by the API.
    """
    node = models.Node.load(settings.TABULATE_LOGS_NODE_ID)
    user = models.User.load(settings.TABULATE_LOGS_USER_ID)
    local_path = local_path or website_settings.ANALYTICS_PATH
    for entry in os.listdir(local_path):
        entry_path = os.path.join(local_path, entry)
        if os.path.isfile(entry_path):
            logger.info('update file: {}'.format(entry_path))
            with open(entry_path, 'rb') as stream:
                utils.create_object(entry, 'file-update', node, user,
                                    stream=stream, kind='file',
                                    path=remote_path)
        else:
            logger.info('create directory: {}'.format(entry_path))
            folder_meta = utils.create_object(entry, 'folder-update', node,
                                              user, kind='folder',
                                              path=remote_path)
            upload_analytics(entry_path, folder_meta['attributes']['path'])
def main():
    """Tabulate recent log counts by name and upload the result as CSV."""
    target_node = models.Node.load(settings.TABULATE_LOGS_NODE_ID)
    uploader = models.User.load(settings.TABULATE_LOGS_USER_ID)
    since = datetime.datetime.utcnow() - settings.TABULATE_LOGS_TIME_OFFSET
    reduced = run_map_reduce(query={'date': {'$gt': since}})
    # Emit rows in descending count order.
    sorted_rows = reduced.find().sort([('value', pymongo.DESCENDING)])
    csv_buffer = StringIO()
    utils.make_csv(
        csv_buffer,
        ((doc['_id'], doc['value']) for doc in sorted_rows),
        ['name', 'count'],
    )
    utils.create_object(
        settings.TABULATE_LOGS_FILE_NAME,
        settings.TABULATE_LOGS_CONTENT_TYPE,
        target_node,
        uploader,
        stream=csv_buffer,
        kind='file',
    )
def main():
    """Map-reduce recent logs, sort by count, and upload a name/count CSV."""
    node = models.Node.load(settings.TABULATE_LOGS_NODE_ID)
    user = models.User.load(settings.TABULATE_LOGS_USER_ID)
    # Only consider log entries newer than the configured offset.
    cutoff = datetime.datetime.utcnow() - settings.TABULATE_LOGS_TIME_OFFSET
    result = run_map_reduce(query={'date': {'$gt': cutoff}})
    cursor = result.find().sort([('value', pymongo.DESCENDING)])
    out = StringIO()
    utils.make_csv(
        out,
        ((record['_id'], record['value']) for record in cursor),
        ['name', 'count'],
    )
    utils.create_object(settings.TABULATE_LOGS_FILE_NAME,
                        settings.TABULATE_LOGS_CONTENT_TYPE,
                        node, user, stream=out, kind='file')