Example #1
def clear_sessions(ctx, months=1, dry_run=False):
    from website.app import init_app

    init_app(routes=False, set_backends=True)
    from scripts import clear_sessions

    clear_sessions.clear_sessions_relative(months=months, dry_run=dry_run)
Example #2
def main():
    dry_run = '--dry' in sys.argv
    if not dry_run:
        script_utils.add_file_logger(logger, __file__)
    init_app(set_backends=True, routes=False)
    with transaction.atomic():
        migrate(dry_run)
Example #3
def main(dry=True):

    init_app(routes=False)
    with transaction.atomic():
        do_populate(clear=True)
        if dry:
            raise Exception('Abort Transaction - Dry Run')
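The snippets above establish the idiom that recurs through the rest of these examples: call init_app() to wire up the backends, open a transaction, do the work, then deliberately raise when running dry so the transaction rolls back and nothing is committed. A minimal self-contained sketch of that idiom (the fake_transaction context manager is a stand-in for TokuTransaction / transaction.atomic(), and the migrate body is hypothetical):

import logging
import sys
from contextlib import contextmanager

logger = logging.getLogger(__name__)


@contextmanager
def fake_transaction():
    # Stand-in for TokuTransaction / transaction.atomic(): "commit" on a
    # clean exit, "roll back" when an exception escapes the block.
    try:
        yield
        logger.info('commit')
    except Exception:
        logger.info('rollback')
        raise


def migrate(dry_run):
    logger.info('migrating (dry_run=%s)', dry_run)


def main():
    dry_run = '--dry' in sys.argv
    try:
        with fake_transaction():
            migrate(dry_run)
            if dry_run:
                # Raising on purpose aborts the transaction, so a dry run
                # exercises the code without committing anything.
                raise RuntimeError('Dry run -- transaction rolled back')
    except RuntimeError as error:
        logger.warning(error)


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    main()
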
Example #4
def main():
    init_app(routes=False)  # Sets the storage backends on all models
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    with TokuTransaction():
        log_duplicate_acount(dry)
Example #5
def main(dry=True):
    init_app(set_backends=True, routes=False)  # Sets the storage backends on all models

    with TokuTransaction():
        do_migration()
        if dry:
            raise Exception('Abort Transaction - Dry Run')
Example #6
def main():
    parser = argparse.ArgumentParser(
        description='Run w/o args for a dry run or with --no-i-mean-it for the real thing.'
    )
    parser.add_argument(
        '--no-i-mean-it',
        action='store_true',
        dest='for_reals',
        help='Run migration and commit changes to db',
    )
    parser.add_argument(
        '--reverse',
        action='store_true',
        dest='reverse',
        help='Run migration in reverse. (e.g. foo.gdoc => foo)',
    )
    parser.add_argument(
        '--audit',
        action='store_true',
        dest='audit',
        help='Collect stats on mime-types and extensions in the db',
    )
    args = parser.parse_args()

    if args.for_reals:
        script_utils.add_file_logger(logger, __file__)

    init_app(set_backends=True, routes=False)
    if args.audit:
        audit()
    else:
        with TokuTransaction():
            migrate(args.reverse)
            if not args.for_reals:
                raise RuntimeError('Dry Run -- Transaction rolled back')
Example #7
def main():
    init_app(routes=False)
    dry_run = '--dry' in sys.argv
    with TokuTransaction():
        lowercase_nids()
        if dry_run:
            raise Exception('Dry run')
Example #8
def main(dry_run=True):
    init_app(routes=False)
    from osf.models import AbstractNode
    from website.project.utils import activity

    popular_activity = activity()

    popular_nodes = popular_activity['popular_public_projects']
    popular_links_node = AbstractNode.find_one(Q('_id', 'eq', POPULAR_LINKS_NODE))
    popular_registrations = popular_activity['popular_public_registrations']
    popular_links_registrations = AbstractNode.find_one(Q('_id', 'eq', POPULAR_LINKS_REGISTRATIONS))

    update_node_links(popular_links_node, popular_nodes, 'popular')
    update_node_links(popular_links_registrations, popular_registrations, 'popular registrations')
    try:
        popular_links_node.save()
        logger.info('Node links on {} updated.'.format(popular_links_node._id))
    except (KeyError, RuntimeError) as error:
        logger.error('Could not migrate popular nodes due to error')
        logger.exception(error)

    try:
        popular_links_registrations.save()
        logger.info('Node links for registrations on {} updated.'.format(popular_links_registrations._id))
    except (KeyError, RuntimeError) as error:
        logger.error('Could not migrate popular nodes for registrations due to error')
        logger.exception(error)

    if dry_run:
        raise RuntimeError('Dry run -- transaction rolled back.')
Example #9
def main(send_email=False):
    logger.info('Starting Project storage audit')
    init_app(set_backends=True, routes=False)

    lines = []
    projects = {}
    users = defaultdict(lambda: (0, 0))

    for node in Node.find(Q('__backrefs.parent.node.nodes', 'eq', None)):  # ODM hack to ignore all nodes with parents
        if node._id in WHITE_LIST:
            continue  # Don't count whitelisted nodes against users
        projects[node] = get_usage(node)
        for contrib in node.contributors:
            if node.can_edit(user=contrib):
                users[contrib] = tuple(map(sum, zip(users[contrib], projects[node])))  # Adds tuples together, map(sum, zip((a, b), (c, d))) -> (a+c, b+d)

    for collection, limit in ((users, USER_LIMIT), (projects, PROJECT_LIMIT)):
        for item, (used, deleted) in filter(functools.partial(limit_filter, limit), collection.items()):
            line = '{!r} has exceeded the limit {:.2f}GBs ({}b) with {:.2f}GBs ({}b) used and {:.2f}GBs ({}b) deleted.'.format(item, limit / GBs, limit, used / GBs, used, deleted / GBs, deleted)
            logger.info(line)
            lines.append(line)

    if lines:
        if send_email:
            logger.info('Sending email...')
            mails.send_mail('*****@*****.**', mails.EMPTY, body='\n'.join(lines), subject='Script: OsfStorage usage audit')
        else:
            logger.info('send_email is False, not sending email')
        logger.info('{} offending project(s) and user(s) found'.format(len(lines)))
    else:
        logger.info('No offending projects or users found')
Example #10
def main():
    if 'dry' not in sys.argv:
        scripts_utils.add_file_logger(logger, __file__)
    # Set up storage backends
    init_app(routes=False)
    logger.info('{n} invalid GUID objects found'.format(n=len(get_targets())))
    logger.info('Finished.')
Example #11
def main(dry_run=True):
    init_app(routes=False)

    popular_node_ids = popular_activity_json()['popular_node_ids']
    popular_links_node = models.Node.find_one(Q('_id', 'eq', POPULAR_LINKS_NODE))
    new_and_noteworthy_links_node = models.Node.find_one(Q('_id', 'eq', NEW_AND_NOTEWORTHY_LINKS_NODE))
    new_and_noteworthy_node_ids = get_new_and_noteworthy_nodes()

    update_node_links(popular_links_node, popular_node_ids, 'popular')
    update_node_links(new_and_noteworthy_links_node, new_and_noteworthy_node_ids, 'new and noteworthy')

    try:
        popular_links_node.save()
        logger.info('Node links on {} updated.'.format(popular_links_node._id))
    except (KeyError, RuntimeError) as error:
        logger.error('Could not migrate popular nodes due to error')
        logger.exception(error)

    try:
        new_and_noteworthy_links_node.save()
        logger.info('Node links on {} updated.'.format(new_and_noteworthy_links_node._id))
    except (KeyError, RuntimeError) as error:
        logger.error('Could not migrate new and noteworthy nodes due to error')
        logger.exception(error)

    if dry_run:
        raise RuntimeError('Dry run -- transaction rolled back.')
Example #12
def run_main(dry_run=True):
    if not dry_run:
        scripts_utils.add_file_logger(logger, __file__)
    init_app(routes=False)
    with TokuTransaction():
        main()
        if dry_run:
            raise RuntimeError("Dry run, rolling back transaction")
Example #13
def main():
    dry = '--dry' in sys.argv
    script_utils.add_file_logger(logger, __file__)
    init_app(set_backends=True, routes=False)
    with TokuTransaction():
        migrate()
        if dry:
            raise RuntimeError('Dry run -- Transaction rolled back')
Example #14
def main(dry=True):
    init_app(set_backends=True, routes=False)
    if not dry:
        scripts_utils.add_file_logger(logger, __file__)
    prereg = MetaSchema.find_one(
            Q('name', 'eq', "Prereg Challenge"))
    migrate_drafts_q5_metadata(prereg)
    migrate_registrations_q5_metadata(prereg)
Example #15
def main(dry_run=True):
    init_app()
    nodes = find_file_mismatch_nodes()
    print('Migrating {0} nodes'.format(len(nodes)))
    if dry_run:
        return
    for node in nodes:
        migrate_node(node)
Example #16
def main():
    init_app(routes=False)
    dry_run = '--dry' in sys.argv
    if dry_run:
        logger.warn('Running a dry run')
    if not dry_run:
        script_utils.add_file_logger(logger, __file__)
    with TokuTransaction():
        migrate(dry=dry_run)
Example #17
def main():
    init_app(routes=False)  # Sets the storage backends on all models
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    with TokuTransaction():
        do_migration(get_targets(), dry)
        if dry:
            raise RuntimeError('Dry run, transaction rolled back')
Example #18
def run_main(dry_run=True):
    init_app(routes=False)
    if not dry_run:
        # If we're not running in dry mode log everything to a file
        script_utils.add_file_logger(logger, __file__)

    # Finally run the migration
    with transaction.atomic():
        main(dry_run=dry_run)
Example #19
def main():
    dry_run = "--dry" in sys.argv
    if not dry_run:
        script_utils.add_file_logger(logger, __file__)
    init_app(set_backends=True, routes=False)
    with TokuTransaction():
        migrate(dry_run=dry_run)
        if dry_run:
            raise RuntimeError("Dry run, transaction rolled back.")
Example #20
def main():
    dry_run = '--dry' in sys.argv
    if dry_run:
        logger.warn('DRY RUN mode')
    else:
        utils.add_file_logger(logger, __file__)
    init_app(routes=False)
    with TokuTransaction():
        migrate(dry_run)
Example #21
def main(dry=True):
    init_app(set_backends=True, routes=False)
    if not dry:
        scripts_utils.add_file_logger(logger, __file__)
    veer = MetaSchema.find_one(
            Q('name', 'eq',
              "Pre-Registration in Social Psychology (van 't Veer & Giner-Sorolla, 2016): Pre-Registration"))
    migrate_drafts_metadata_key(veer)
    migrate_registrations_metadata_key(veer)
Example #22
def main():
    dry_run = False
    if '--dry' in sys.argv:
        dry_run = True
    if not dry_run:
        script_utils.add_file_logger(logger, __file__)
    init_app(set_backends=True, routes=False)
    with TokuTransaction():
        migrate(dry_run=dry_run)
Example #23
def main():
    init_app(routes=False)  # Sets the storage backends on all models
    if 'dry' in sys.argv:
        user_list = get_targets()
        for user in user_list:
            log_info(user)
        logger.info('[dry] Migrated {0} users'.format(len(user_list)))
    else:
        do_migration(get_targets())
Example #24
def main(dry=True):
    init_app(set_backends=True, routes=False)  # Sets the storage backends on all models

    # Start a transaction that will be rolled back if any exceptions are uncaught
    with TokuTransaction():
        do_migration()
        if dry:
            # When running in dry mode force the transaction to rollback
            raise Exception('Abort Transaction - Dry Run')
Example #25
def main():
    init_app(set_backends=True)
    with open(BLACKLIST_FILE, 'r') as reader:
        blacklist = [item.rstrip('\n') for item in reader]

    chunk_size = len(blacklist) // 4  # integer division so the slice indices stay ints
    chunks = [
        blacklist[:chunk_size],
        blacklist[chunk_size:chunk_size * 2],
        blacklist[chunk_size * 2:chunk_size * 3],
        blacklist[chunk_size * 3:],
    ]
    for c in chunks:
        create_blacklist_guid_objects(c)
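The slicing above only splits into four roughly equal pieces; a more general sketch (not from the original script) that chunks a list into n contiguous pieces, with the last chunk absorbing any remainder:

def chunked(items, n):
    # Split items into n contiguous chunks; the last absorbs any remainder.
    size = len(items) // n or 1
    head = [items[i * size:(i + 1) * size] for i in range(n - 1)]
    return head + [items[(n - 1) * size:]]


assert chunked(list(range(10)), 4) == [[0, 1], [2, 3], [4, 5], [6, 7, 8, 9]]
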
Example #26
def main():
    init_app(set_backends=True, routes=False)
    dry_run = '--dry' in sys.argv
    if not dry_run:
        script_utils.add_file_logger(logger, __file__)
    with transaction.atomic():
        update_taxonomies('bepress_taxonomy.json')
        if dry_run:
            raise RuntimeError('Dry run, transaction rolled back')
Example #27
def main(dry):
    if dry:
        logger.info('[DRY MODE]')
    init_app(routes=False)
    for _id in FAILED_ARCHIVE_JOBS:
        archive_job = ArchiveJob.load(_id)
        assert archive_job.status == ARCHIVER_INITIATED
        root_node = archive_job.dst_node.root
        with TokuTransaction():
            clean(reg=root_node, dry=dry)
Example #28
def main():
    init_app()

    download_count_summary = DownloadCountSummary()
    date = datetime.date(2018, 1, 1)

    while date < datetime.date.today():
        events = download_count_summary.get_events(date)
        download_count_summary.send_events(events)
        date += datetime.timedelta(days=1)
Example #29
def main():
    init_app(routes=False)
    guid = sys.argv[1]
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    with TokuTransaction():
        restore_file(guid)
        if dry:
            raise RuntimeError('Dry run - rolling back transaction')
Example #30
def main():
    init_app(routes=False)
    dry = '--dry' in sys.argv
    if not dry:
        # If we're not running in dry mode log everything to a file
        script_utils.add_file_logger(logger, __file__)
    with TokuTransaction():
        migrate()
        if dry:
            raise Exception('Abort Transaction - Dry Run')
Example #31
def main():
    init_app(set_backends=True, routes=False)
    dev = 'dev' in sys.argv
    populate_conferences(dev=dev)
Example #32
def main():
    init_app(routes=False)  # Sets the storage backends on all models
    sitemap = Sitemap()
    sitemap.generate()
    sitemap.cleanup()
Example #33
                            log._id)
        else:
            logger.warning('No parent registration found for retraction log ' +
                           log._id)


def get_targets():
    # ... return the list of logs whose registrations we want to migrate ...
    targets = NodeLog.find(Q('action', 'eq', 'retraction_approved'))

    logger.info('Retractions found: {}'.format(len(targets)))
    return targets


if __name__ == '__main__':
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)

    with TokuTransaction():
        init_app(set_backends=True,
                 routes=False)  # Sets the storage backends on all models
        targets = get_targets()
        for target in targets:
            logger.info('{} {}: {}'.format(target.date,
                                           target.params.get('registration'),
                                           target.action))
        do_migration(targets)
        if dry:
            raise RuntimeError('Dry run, rolling back transaction.')
Example #34
        return find_child_and_grandchild(grandpa, childIndex=childIndex + 1)
    return parent, child


def create_subject_rules():
    top_levels = Subject.find(Q('parents', 'eq', []))
    subA = top_levels[0]
    subB = top_levels[1]
    subC = top_levels[2]

    children_of_A = Subject.find(Q('parents', 'eq', subA))
    subD = children_of_A[0]
    subE = children_of_A[1]

    subF, subG = find_child_and_grandchild(subB)

    rules = [([subA._id, subD._id], False), ([subA._id, subE._id], True),
             ([subB._id, subF._id, subG._id], True), ([subC._id], True)]
    return rules


def main():
    provider = PreprintProvider.find()[0]
    provider.subjects_acceptable = create_subject_rules()
    provider.save()


if __name__ == '__main__':
    init_app(set_backends=True)
    main()
Example #35
"""Due to an unknown bug, wiki pages were saved without dates between
September 4 and 6. This script identifies wiki pages without dates and
imputes dates using ObjectIds.

Dry run: python -m scripts.consistency.impute_wiki_date
Real: python -m scripts.consistency.impute_wiki_date false

"""

from bson import ObjectId

from website.app import init_app
from website import models
from framework import Q

app = init_app()


def impute_wiki_date(dry_run=True):
    no_date = models.NodeWikiPage.find(Q('date', 'eq', None))
    for wiki in no_date:
        oid = ObjectId(wiki._primary_key)
        imputed_date = oid.generation_time
        print u'Imputing date {} for wiki ID {}'.format(
            imputed_date.strftime('%c'),
            wiki._primary_key,
        )
        if not dry_run:
            wiki._fields['date'].__set__(wiki, imputed_date, safe=True)
            wiki.save()
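The imputation works because the first four bytes of a Mongo ObjectId are a big-endian Unix timestamp recorded at creation; bson exposes that as generation_time. A quick demonstration (the hex id is arbitrary):

from bson import ObjectId

# The first 8 hex characters encode the creation time as seconds since the
# Unix epoch; generation_time decodes them into a UTC datetime.
oid = ObjectId('5a2f0000' + '0' * 16)
print(oid.generation_time)  # the UTC datetime for Unix time 0x5a2f0000
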
Example #36
def run_main(job_id=None, dry_run=True):
    init_app(set_backends=True, routes=False)
    if not dry_run:
        scripts_utils.add_file_logger(logger, __file__)
    main(job_id=job_id)
Example #37
def main(dry=True):
    init_app(routes=False)
    with transaction.atomic():
        do_migration()
        if dry:
            raise Exception('Abort Transaction - Dry Run')
Example #38
        'password': {
            '$ne': None
        },
        'is_merged': {
            '$ne': True
        },
        'date_confirmed': {
            '$gte': datetime.datetime.utcnow() - delta
        },
    })


def main():
    node = models.Node.load(settings.TABULATE_EMAILS_NODE_ID)
    user = models.User.load(settings.TABULATE_EMAILS_USER_ID)
    emails = get_emails_since(settings.TABULATE_EMAILS_TIME_DELTA)
    sio = StringIO()
    utils.make_csv(sio, emails, ['affiliation', 'count'])

    utils.create_object(settings.TABULATE_EMAILS_FILE_NAME,
                        settings.TABULATE_EMAILS_CONTENT_TYPE,
                        node,
                        user,
                        stream=sio,
                        kind='file')


if __name__ == '__main__':
    init_app()
    main()
Example #39
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gevent import monkey
monkey.patch_all()

# PATCH: avoid deadlock on getaddrinfo; this patch is necessary while waiting for
# the final gevent 1.1 release (https://github.com/gevent/gevent/issues/349)
unicode('foo').encode('idna')  # noqa

from psycogreen.gevent import patch_psycopg  # noqa
patch_psycopg()


import os  # noqa

from website import settings  # noqa
from website.app import init_app  # noqa

application = app = init_app('website.settings', set_backends=True, routes=True)

if __name__ == '__main__':
    host = os.environ.get('OSF_HOST', None)
    port = os.environ.get('OSF_PORT', None)
    if port:
        port = int(port)

    app.run(host=host, port=port, extra_files=[settings.ASSET_HASH_PATH], threaded=settings.DEBUG_MODE)
Example #40
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search

from website.app import init_app

app = init_app("website.settings", set_backends=True, routes=True)


def migrate_nodes():
    for node in Node.find(
            Q('is_public', 'eq', True)
            & Q('is_deleted', 'eq', False)):
        node.update_search()


def migrate_users():
    for user in User.find(
            Q('is_registered', 'eq', True)
            & Q('date_confirmed', 'ne', None)):
        user.update_search()


def main():
    # Truncated in the source; a plausible completion (assumption) would run
    # both migrations inside a request context:
    with app.test_request_context():
        migrate_nodes()
        migrate_users()
Example #41
def run_main(dry_run=True):
    init_app(routes=False)
    if not dry_run:
        scripts_utils.add_file_logger(logger, __file__)
    main(dry_run=dry_run)
Example #42
def clear_sessions(ctx, months=1, dry_run=False):
    from website.app import init_app
    init_app(routes=False, set_backends=True)
    from scripts import clear_sessions
    clear_sessions.clear_sessions_relative(months=months, dry_run=dry_run)
Example #43
import mock
from nose.tools import *

from tests.base import OsfTestCase
from tests.factories import AuthUserFactory
from website.app import init_app
from website.addons.twofactor.tests import _valid_code

app = init_app(
    routes=True,
    set_backends=False,
    settings_module='website.settings',
)

class TestViews(OsfTestCase):
    @mock.patch('website.addons.twofactor.models.push_status_message')
    def setUp(self, mocked):
        super(TestViews, self).setUp()
        self.user = AuthUserFactory()
        self.user.add_addon('twofactor')
        self.user_settings = self.user.get_addon('twofactor')

    def test_confirm_code(self):
        # Send a valid code to the API endpoint for the user settings.
        res = self.app.post_json(
            '/api/v1/settings/twofactor/',
            {'code': _valid_code(self.user_settings.totp_secret)},
            auth=self.user.auth
        )

        # reload the user settings object from the DB
Example #44
import logging

from webtest_plus import TestApp

from .json_api_test_app import JSONAPITestApp

from nose.tools import *  # noqa (PEP8 asserts)

logger = logging.getLogger(__name__)


def get_default_metaschema():
    """This needs to be a method so it gets called after the test database is set up"""
    return MetaSchema.find()[0]


try:
    test_app = init_app(routes=True, set_backends=False)
except AssertionError:  # Routes have already been set up
    test_app = init_app(routes=False, set_backends=False)

rm_handlers(test_app, django_handlers)

test_app.testing = True

# Silence some 3rd-party logging and some "loud" internal loggers
SILENT_LOGGERS = [
    'api.caching.tasks',
    'factory.generate',
    'factory.containers',
    'framework.analytics',
    'framework.auth.core',
    'website.app',
Example #45
def main():
    init_app(routes=False)  # Sets the storage backends on all models
    dry = 'dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    do_migration(get_targets(), dry)
Example #46
                                    'wiki_pages_current': wiki_pages_current
                                }
                            })

                    else:
                        cloned_wiki_pages[key].append(wiki_id)

            db.node.update(
                {'_id': node['_id']},
                {'$set': {
                    'wiki_pages_versions': cloned_wiki_pages
                }})


# Wiki pages with nodes that no longer exist are removed from NodeWikiPage
# and put into a separate collection
def move_to_backup_collection(node_wiki_page):
    db[BACKUP_COLLECTION].insert(node_wiki_page.to_storage())
    NodeWikiPage.remove_one(Q('_id', 'eq', node_wiki_page._id))


if __name__ == '__main__':
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    init_app(routes=False, set_backends=True)
    with TokuTransaction():
        main()
        if dry:
            raise Exception('Dry Run -- Aborting Transaction')
Example #47
def main(dry=True):
    # Set up storage backends
    init_app(routes=False)
    users = list(get_users())
    update_users(users, dry=dry)
    subscribe_users(users, dry=dry)  # confirm list name before running script
Example #48
#!/usr/bin/env python
import logging
import sys
import os

if __name__ == '__main__':
    from django.core.management import execute_from_command_line

    # allow the osf app/model initialization to be skipped so we can run django
    # commands like collectstatic w/o requiring a database to be running
    if '--no-init-app' in sys.argv:
        sys.argv.remove('--no-init-app')
        logging.basicConfig(level=logging.INFO)
    else:
        from website.app import init_app
        init_app(set_backends=True,
                 routes=False,
                 attach_request_handlers=False,
                 fixtures=False)

    if os.environ.get('DJANGO_SETTINGS_MODULE') == 'admin.base.settings' and 'migrate' in sys.argv:
        raise RuntimeError(
            'Running migrations from the admin project is disallowed.')

    execute_from_command_line(sys.argv)
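With that guard in place, a command like python manage.py collectstatic --no-init-app (assuming this file is a Django manage.py) can run without a database, while an ordinary invocation still initializes the OSF app first; and migrate is refused outright whenever the admin settings module is active.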
Example #49
def find_inactive_users_with_no_inactivity_email_sent_or_queued():
    inactive_users = User.find(
        (Q('date_last_login', 'lt',
           datetime.utcnow() - settings.NO_LOGIN_WAIT_TIME)
         & Q('osf4m', 'ne', 'system_tags'))
        | (Q('date_last_login', 'lt',
             datetime.utcnow() - settings.NO_LOGIN_OSF4M_WAIT_TIME)
           & Q('osf4m', 'eq', 'system_tags')))
    inactive_emails = mails.QueuedMail.find(
        Q('email_type', 'eq', mails.NO_LOGIN_TYPE))

    # Compare _ids rather than User objects: equality checks between query
    # results and User instances can fail on datetime fields due to pymongo rounding.
    users_sent_id = [email.user._id for email in inactive_emails]
    inactive_ids = [user._id for user in inactive_users if user.is_active]
    users_to_send = [
        User.load(id) for id in (set(inactive_ids) - set(users_sent_id))
    ]
    return users_to_send


if __name__ == '__main__':
    dry_run = 'dry' in sys.argv
    init_app(routes=False)
    if not dry_run:
        add_file_logger(logger, __file__)
    main(dry_run=dry_run)
Example #50
def app_init():
    init_app(routes=False, set_backends=False)
Example #51
def main(env):
    INSTITUTIONS = []

    if env == 'prod':
        INSTITUTIONS = [
            {
                '_id': 'cos',
                'name': 'Center For Open Science',
                'description': 'Center for Open Science',
                'banner_name': 'cos-banner.png',
                'logo_name': 'cos-shield.png',
                'auth_url': None,
                'domains': ['osf.cos.io'],
                'email_domains': ['cos.io'],
            },
            {
                '_id': 'nd',
                'name': 'University of Notre Dame',
                'description': 'University of Notre Dame',
                'banner_name': 'nd-banner.png',
                'logo_name': 'nd-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('https://login.nd.edu/idp/shibboleth')),
                'domains': ['osf.nd.edu'],
                'email_domains': [],
            },
            {
                '_id': 'ucr',
                'name': 'University of California Riverside',
                'description': 'University of California Riverside',
                'banner_name': 'ucr-banner.png',
                'logo_name': 'ucr-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('urn:mace:incommon:ucr.edu')),
                'domains': ['osf.ucr.edu'],
                'email_domains': [],
            },
            {
                '_id': 'usc',
                'name': 'University of Southern California',
                'description': 'University of Southern California',
                'banner_name': 'usc-banner.png',
                'logo_name': 'usc-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('urn:mace:incommon:usc.edu')),
                'domains': ['osf.usc.edu'],
                'email_domains': [],
            },
        ]
    if env == 'stage':
        INSTITUTIONS = [
            {
                '_id': 'cos',
                'name': 'Center For Open Science [Stage]',
                'description': 'Center for Open Science [Stage]',
                'banner_name': 'cos-banner.png',
                'logo_name': 'cos-shield.png',
                'auth_url': None,
                'domains': ['staging-osf.cos.io'],
                'email_domains': ['cos.io'],
            },
            {
                '_id': 'nd',
                'name': 'University of Notre Dame [Stage]',
                'description': 'University of Notre Dame [Stage]',
                'banner_name': 'nd-banner.png',
                'logo_name': 'nd-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('https://login-test.cc.nd.edu/idp/shibboleth')),
                'domains': ['staging-osf-nd.cos.io'],
                'email_domains': [],
            },
        ]
    if env == 'stage2':
        INSTITUTIONS = [
            {
                '_id': 'cos',
                'name': 'Center For Open Science [Stage2]',
                'description': 'Center for Open Science [Stage2]',
                'banner_name': 'cos-banner.png',
                'logo_name': 'cos-shield.png',
                'auth_url': None,
                'domains': ['staging2-osf.cos.io'],
                'email_domains': ['cos.io'],
            },
        ]
    elif env == 'test':
        INSTITUTIONS = [
            {
                '_id': 'cos',
                'name': 'Center For Open Science [Test]',
                'description': 'Center for Open Science [Test]',
                'banner_name': 'cos-banner.png',
                'logo_name': 'cos-shield.png',
                'auth_url': None,
                'domains': ['test-osf.cos.io'],
                'email_domains': ['cos.io'],
            },
            {
                '_id': 'nd',
                'name': 'University of Notre Dame [Test]',
                'description': 'University of Notre Dame [Test]',
                'banner_name': 'nd-banner.png',
                'logo_name': 'nd-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('https://login-test.cc.nd.edu/idp/shibboleth')),
                'domains': ['test-osf-nd.cos.io'],
                'email_domains': [],
            },
            {
                '_id': 'ucr',
                'name': 'University of California Riverside [Test]',
                'description': 'University of California Riverside [Test]',
                'banner_name': 'ucr-banner.png',
                'logo_name': 'ucr-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('urn:mace:incommon:ucr.edu')),
                'domains': ['test-osf-ucr.cos.io'],
                'email_domains': [],
            },
            {
                '_id': 'usc',
                'name': 'University of Southern California [Test]',
                'description': 'University of Southern California [Test]',
                'banner_name': 'usc-banner.png',
                'logo_name': 'usc-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('urn:mace:incommon:usc.edu')),
                'domains': ['test-osf-usc.cos.io'],
                'email_domains': [],
            },
        ]

    init_app(routes=False)
    with TokuTransaction():
        for inst_data in INSTITUTIONS:
            new_inst, inst_created = update_or_create(inst_data)
        for extra_inst in Institution.find(Q('_id', 'nin', [x['_id'] for x in INSTITUTIONS])):
            logger.warn('Extra Institution : {} - {}'.format(extra_inst._id, extra_inst.name))
Example #52
def render_generations_from_node_structure_list(parent, creator,
                                                node_structure_list):
    new_parent = None
    for node_number in node_structure_list:
        if isinstance(node_number, list):
            render_generations_from_node_structure_list(
                new_parent or parent, creator, node_number)
        else:
            new_parent = render_generations_from_parent(
                parent, creator, node_number)
    return new_parent


def main():
    args = parse_args()
    creator = models.User.find(Q('username', 'eq', args.user))[0]
    for i in range(args.n_projects):
        name = args.name + str(i) if args.name else ''
        create_fake_project(creator, args.n_users, args.privacy,
                            args.n_components, name, args.n_tags,
                            args.presentation_name, args.is_registration,
                            args.is_preprint, args.preprint_providers)
    print('Created {n} fake projects.'.format(n=args.n_projects))
    sys.exit(0)


if __name__ == '__main__':
    init_app(set_backends=True, routes=False)
    main()
Example #53
def main(dry=True):
    init_app(set_backends=True,
             routes=False)  # Sets the storage backends on all models
    do_migration(dry=dry)
Example #54
def main(dry=True):
    init_app(set_backends=True, routes=False)  # Sets the storage backends on all models
    with TokuTransaction():
        do_migration()
        if dry:
            raise Exception('Abort Transaction - Dry Run')
Example #55
def __init__(self):
    init_app(routes=False)
Example #56
def main(dry=True):
    init_app(set_backends=True, routes=False)
    if not dry:
        scripts_utils.add_file_logger(logger, __file__)
    migrate_drafts(dry)
Example #57
def main(dry=True):
    init_app(routes=False)
    with TokuTransaction():
        do_migration(get_targets())
        if dry:
            raise Exception('Abort Transaction - Dry Run')
Example #58
def main():
    init_app(routes=False)  # Sets the storage backends on all models
    Sitemap().generate()
Example #59
def main():
    init_app(set_backends=True, routes=False)
    populate_conferences()
Example #60
def main(env):
    INSTITUTIONS = []

    if env == 'prod':
        INSTITUTIONS = [
            {
                '_id': 'cos',
                'name': 'Center For Open Science',
                'description': 'COS is a non-profit technology company providing free and open services to increase inclusivity and transparency of research. Find out more at <a href="https://cos.io">cos.io</a>.',
                'banner_name': 'cos-banner.png',
                'logo_name': 'cos-shield.png',
                'auth_url': None,
                'domains': ['osf.cos.io'],
                'email_domains': ['cos.io'],
            },
            # {
            #     '_id': 'nd',
            #     'name': 'University of Notre Dame',
            #     'description': None,
            #     'banner_name': 'nd-banner.png',
            #     'logo_name': 'nd-shield.png',
            #     'auth_url': SHIBBOLETH_SP.format(encode_uri_component('https://login.nd.edu/idp/shibboleth')),
            #     'domains': ['osf.nd.edu'],
            #     'email_domains': [],
            # },
            {
                '_id': 'ucr',
                'name': 'University of California Riverside',
                'description': 'Policy prohibits storing PII or HIPAA data on this site, please see C&amp;C\'s <a href="http://cnc.ucr.edu/security/researchers.html">security site</a> for more information.',
                'banner_name': 'ucr-banner.png',
                'logo_name': 'ucr-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('urn:mace:incommon:ucr.edu')),
                'domains': ['osf.ucr.edu'],
                'email_domains': [],
            },
            # {
            #     '_id': 'ugent',
            #     'name': 'Universiteit Gent [Test]',
            #     'description': None,
            #     'banner_name': 'ugent-banner.png',
            #     'logo_name': 'ugent-shield.png',
            #     'auth_url': SHIBBOLETH_SP.format(encode_uri_component('https://identity.ugent.be/simplesaml/saml2/idp/metadata.php')),
            #     'domains': ['osf.ugent.be'],
            #     'email_domains': [],
            # },
            {
                '_id': 'usc',
                'name': 'University of Southern California',
                'description': 'Projects must abide by <a href="http://policy.usc.edu/info-security/">USC\'s Information Security Policy</a>. Data stored for human subject research repositories must abide by <a href="http://policy.usc.edu/biorepositories/">USC\'s Biorepository Policy</a>. The OSF may not be used for storage of Personal Health Information that is subject to <a href="http://policy.usc.edu/hipaa/">HIPAA regulations</a>.',
                'banner_name': 'usc-banner.png',
                'logo_name': 'usc-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('urn:mace:incommon:usc.edu')),
                'domains': ['osf.usc.edu'],
                'email_domains': [],
            },
            # {
            #     '_id': 'uva',
            #     'name': 'University of Virginia',
            #     'description': 'Projects must abide by the University <a href="http://www.virginia.edu/informationpolicy/security.html">Security and Data Protection Policies</a>',
            #     'banner_name': 'uva-banner.png',
            #     'logo_name': 'uva-shield.png',
            #     'auth_url': SHIBBOLETH_SP.format(encode_uri_component('urn:mace:incommon:virginia.edu')),
            #     'domains': ['osf.virginia.edu'],
            #     'email_domains': [],
            # },
        ]
    if env == 'stage':
        INSTITUTIONS = [
            {
                '_id': 'cos',
                'name': 'Center For Open Science [Stage]',
                'description': 'Center for Open Science [Stage]',
                'banner_name': 'cos-banner.png',
                'logo_name': 'cos-shield.png',
                'auth_url': None,
                'domains': ['staging-osf.cos.io'],
                'email_domains': ['cos.io'],
            },
            {
                '_id': 'nd',
                'name': 'University of Notre Dame [Stage]',
                'description': 'University of Notre Dame [Stage]',
                'banner_name': 'nd-banner.png',
                'logo_name': 'nd-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('https://login-test.cc.nd.edu/idp/shibboleth')),
                'domains': ['staging-osf-nd.cos.io'],
                'email_domains': [],
            },
        ]
    if env == 'stage2':
        INSTITUTIONS = [
            {
                '_id': 'cos',
                'name': 'Center For Open Science [Stage2]',
                'description': 'Center for Open Science [Stage2]',
                'banner_name': 'cos-banner.png',
                'logo_name': 'cos-shield.png',
                'auth_url': None,
                'domains': ['staging2-osf.cos.io'],
                'email_domains': ['cos.io'],
            },
        ]
    elif env == 'test':
        INSTITUTIONS = [
            {
                '_id': 'cos',
                'name': 'Center For Open Science [Test]',
                'description': 'COS is a non-profit technology company providing free and open services to increase inclusivity and transparency of research. Find out more at <a href="https://cos.io">cos.io</a>.',
                'banner_name': 'cos-banner.png',
                'logo_name': 'cos-shield.png',
                'auth_url': None,
                'domains': ['test-osf.cos.io'],
                'email_domains': ['cos.io'],
            },
            {
                '_id': 'nd',
                'name': 'University of Notre Dame [Test]',
                'description': 'University of Notre Dame [Test]',
                'banner_name': 'nd-banner.png',
                'logo_name': 'nd-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('https://login-test.cc.nd.edu/idp/shibboleth')),
                'domains': ['test-osf-nd.cos.io'],
                'email_domains': [],
            },
            {
                '_id': 'ucr',
                'name': 'University of California Riverside [Test]',
                'description': 'Policy prohibits storing PII or HIPAA data on this site, please see C&amp;C\'s <a href="http://cnc.ucr.edu/security/researchers.html">security site</a> for more information.',
                'banner_name': 'ucr-banner.png',
                'logo_name': 'ucr-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('urn:mace:incommon:ucr.edu')),
                'domains': ['test-osf-ucr.cos.io'],
                'email_domains': [],
            },
            {
                '_id': 'ugent',
                'name': 'Universiteit Gent [Test]',
                'description': 'Universiteit Gent [Test]',
                'banner_name': 'ugent-banner.png',
                'logo_name': 'ugent-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('https://identity.ugent.be/simplesaml/saml2/idp/metadata.php')),
                'domains': ['test-osf-ugent.cos.io'],
                'email_domains': [],
            },
            {
                '_id': 'usc',
                'name': 'University of Southern California [Test]',
                'description': 'Projects must abide by <a href="http://policy.usc.edu/info-security/">USC\'s Information Security Policy</a>. Data stored for human subject research repositories must abide by <a href="http://policy.usc.edu/biorepositories/">USC\'s Biorepository Policy</a>. The OSF may not be used for storage of Personal Health Information that is subject to <a href="http://policy.usc.edu/hipaa/">HIPAA regulations</a>.',
                'banner_name': 'usc-banner.png',
                'logo_name': 'usc-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('urn:mace:incommon:usc.edu')),
                'domains': ['test-osf-usc.cos.io'],
                'email_domains': [],
            },
            {
                '_id': 'uva',
                'name': 'University of Virginia [Test]',
                'description': 'Projects must abide by the University <a href="http://www.virginia.edu/informationpolicy/security.html">Security and Data Protection Policies</a>',
                'banner_name': 'uva-banner.png',
                'logo_name': 'uva-shield.png',
                'auth_url': SHIBBOLETH_SP.format(encode_uri_component('https://shibidp-test.its.virginia.edu/idp/shibboleth')),
                'domains': ['test-osf-virginia.cos.io'],
                'email_domains': [],
            },
        ]

    init_app(routes=False)
    with TokuTransaction():
        for inst_data in INSTITUTIONS:
            new_inst, inst_created = update_or_create(inst_data)
            # Update the nodes' Elasticsearch docs so they carry the institutions'
            # current names. This only works properly if this file is the only
            # thing changing institution attributes.
            if not inst_created:
                nodes = Node.find_by_institution(new_inst, query=Q('is_deleted', 'ne', True))
                for node in nodes:
                    update_node(node, async=False)
        for extra_inst in Institution.find(Q('_id', 'nin', [x['_id'] for x in INSTITUTIONS])):
            logger.warn('Extra Institution : {} - {}'.format(extra_inst._id, extra_inst.name))
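
Both institution-seeding examples end with the same reconciliation idiom: upsert every institution in the canonical list, then flag anything left in the database that the list no longer mentions. A generic sketch of that idiom (names hypothetical):

def reconcile(canonical, existing):
    # The canonical list drives the upserts; anything in the database but not
    # in the list is returned as an "extra" to be flagged, not deleted.
    known_ids = {item['_id'] for item in canonical}
    return [record for record in existing if record['_id'] not in known_ids]


extras = reconcile(
    canonical=[{'_id': 'cos'}, {'_id': 'ucr'}],
    existing=[{'_id': 'cos'}, {'_id': 'nd'}],
)
assert [record['_id'] for record in extras] == ['nd']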