def setupAllDB():
    """Sets up all databases"""
    createDatabase(CONFIG_DB['db_name'])
    runMigrations()
    setupJobTrackerDB()
    setupErrorDB()
    setupUserDB()
    setupJobQueueDB()
    setupValidationDB()
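A minimal usage sketch, assuming setupAllDB is importable from the module that defines it (the __main__ guard below is illustrative and not part of the original source):

if __name__ == '__main__':
    # One-shot bootstrap: create the broker database, apply migrations,
    # then seed the job tracker, error, user, job queue, and validation DBs.
    setupAllDB()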
Example 2
def full_database_setup():
    """Sets up a clean database based on the model metadata. It also
    calculates the FK relationships between tables so we can delete them in
    order. It yields a tuple the _DB and ordered list of tables."""
    rand_id = str(randint(1, 9999))

    config = dataactcore.config.CONFIG_DB
    config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
    dataactcore.config.CONFIG_DB = config

    createDatabase(config['db_name'])
    db = dbConnection()
    runMigrations()

    creation_order = baseModel.Base.metadata.sorted_tables
    yield (db, list(reversed(creation_order)))  # drop order

    db.close()
    dropDatabase(config['db_name'])
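A minimal sketch of wiring this generator into a pytest fixture; the fixture name broker_db and the sample test are illustrative assumptions, but `yield from` correctly delegates both the setup and the database drop to full_database_setup:

import pytest

@pytest.fixture(scope='session')
def broker_db():
    # full_database_setup() yields once and then drops the temporary
    # unittest database, so delegating with `yield from` hands pytest
    # both the setup value and the teardown step.
    yield from full_database_setup()

def test_tables_listed_in_drop_order(broker_db):
    db, tables_in_drop_order = broker_db
    # Tables arrive child-first (reverse of creation order), so deleting
    # or dropping them in this order will not violate FK constraints.
    assert len(tables_in_drop_order) > 0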
Example 3
    def setUpClass(cls):
        """Set up resources to be shared within a test class"""
        # TODO: refactor into pytest class fixtures and inject as necessary
        # update application's db config options so unittests
        # run against test databases
        suite = cls.__name__.lower()
        config = dataactcore.config.CONFIG_DB
        cls.num = randint(1, 9999)
        config['db_name'] = 'unittest{}_{}_data_broker'.format(
            cls.num, suite)
        dataactcore.config.CONFIG_DB = config
        createDatabase(CONFIG_DB['db_name'])
        runMigrations()

        app = createApp()
        app.config['TESTING'] = True
        app.config['DEBUG'] = False
        cls.app = TestApp(app)

        # Allow us to augment default test failure msg w/ more detail
        cls.longMessage = True
        # Upload files to S3 (False = skip re-uploading on subsequent runs)
        cls.uploadFiles = True
        # Run tests for local broker or not
        cls.local = CONFIG_BROKER['local']
        # This needs to be set to the local directory for error reports if local is True
        cls.local_file_directory = CONFIG_SERVICES['error_report_path']

        # drop and re-create test job db/tables
        setupJobTrackerDB()
        # drop and re-create test error db/tables
        setupErrorDB()
        # drop and re-create test validation db
        setupValidationDB()

        cls.userId = None
        # constants to use for default submission start and end dates
        cls.SUBMISSION_START_DEFAULT = datetime(2015, 10, 1)
        cls.SUBMISSION_END_DEFAULT = datetime(2015, 10, 31)
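A hedged sketch of a concrete test class building on this setUpClass; BaseTestAPI and the /v1/current_user/ route are assumed names for illustration, while cls.app is the webtest TestApp created above:

class LoginTests(BaseTestAPI):  # BaseTestAPI: assumed name of the class defining setUpClass above

    def test_unauthenticated_request_is_rejected(self):
        # expect_errors=True makes webtest return the error response
        # instead of raising, so the status code can be asserted directly.
        response = self.app.get('/v1/current_user/', expect_errors=True)
        self.assertIn(response.status_code, (401, 403))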
Example 4
def setupJobQueueDB():
    """Create the job queue database."""
    createDatabase(CONFIG_DB['job_queue_db_name'])
Example 5
def setupUserDB():
    """Create user tables from model metadata."""
    createDatabase(CONFIG_DB['user_db_name'])
    runMigrations('user_manager')
    insertCodes()
Example 6
def setupErrorDB():
    """Create job tracker tables from model metadata."""
    createDatabase(CONFIG_DB['error_db_name'])
    runMigrations('error_data')
    insertCodes()
Example 7
    def setUpClass(cls):
        """Set up resources to be shared within a test class"""
        cls.session_id = ""

        with createValidatorApp().app_context():

            # update application's db config options so unittests
            # run against test databases
            suite = cls.__name__.lower()
            config = dataactcore.config.CONFIG_DB
            cls.num = randint(1, 9999)
            config['db_name'] = 'unittest{}_{}_data_broker'.format(
                cls.num, suite)
            dataactcore.config.CONFIG_DB = config
            createDatabase(CONFIG_DB['db_name'])
            runMigrations()

            # drop and re-create test user db/tables
            setupUserDB()
            # drop and re-create test job db/tables
            setupJobTrackerDB()
            # drop and re-create test error db/tables
            setupErrorDB()
            # drop and re-create test validation db/tables
            setupValidationDB()
            # load e-mail templates
            setupEmails()

            # set up default e-mails for tests
            test_users = {}
            test_users['admin_email'] = '*****@*****.**'
            test_users['change_user_email'] = '*****@*****.**'
            test_users['password_reset_email'] = '*****@*****.**'
            test_users['inactive_email'] = '*****@*****.**'
            test_users['password_lock_email'] = '*****@*****.**'
            test_users['expired_lock_email'] = '*****@*****.**'
            test_users['agency_admin_email'] = '*****@*****.**'

            # this is a regular agency_user account, used for testing
            # functionality expected of a normal, base user
            test_users['agency_user'] = '******'
            test_users['approved_email'] = '*****@*****.**'
            test_users['submission_email'] = '*****@*****.**'
            user_password = '******'
            admin_password = '******'

            # set up users for status tests
            StatusTestUser = namedtuple(
                'StatusTestUser',
                ['email', 'user_status', 'permissions', 'user_type'])
            StatusTestUser.__new__.__defaults__ = (None, None,
                                                   AccountType.AGENCY_USER,
                                                   None)
            status_test_users = []
            status_test_users.append(
                StatusTestUser('*****@*****.**', 'awaiting_confirmation', 0))
            status_test_users.append(
                StatusTestUser('*****@*****.**', 'email_confirmed'))
            status_test_users.append(
                StatusTestUser('*****@*****.**', 'awaiting_approval'))
            status_test_users.append(
                StatusTestUser('*****@*****.**', 'awaiting_approval'))
            status_test_users.append(
                StatusTestUser('*****@*****.**',
                               'awaiting_approval'))
            status_test_users.append(
                StatusTestUser(
                    test_users['admin_email'], 'approved',
                    AccountType.WEBSITE_ADMIN + AccountType.AGENCY_USER))
            status_test_users.append(
                StatusTestUser(test_users['approved_email'], 'approved'))
            status_test_users.append(
                StatusTestUser('*****@*****.**', 'denied'))

            # add new users
            createUserWithPassword(test_users["submission_email"],
                                   user_password, Bcrypt())
            createUserWithPassword(test_users["change_user_email"],
                                   user_password, Bcrypt())
            createUserWithPassword(test_users["password_reset_email"],
                                   user_password, Bcrypt())
            createUserWithPassword(test_users["inactive_email"], user_password,
                                   Bcrypt())
            createUserWithPassword(test_users["password_lock_email"],
                                   user_password, Bcrypt())
            createUserWithPassword(test_users['expired_lock_email'],
                                   user_password, Bcrypt())
            createUserWithPassword(test_users['agency_admin_email'],
                                   admin_password,
                                   Bcrypt(),
                                   permission=4)
            createUserWithPassword(test_users['agency_user'], user_password,
                                   Bcrypt())

            # get user info and save as class variables for use by tests

            sess = GlobalDB.db().session

            agencyUser = sess.query(User).filter(
                User.email == test_users['agency_user']).one()
            cls.agency_user_id = agencyUser.user_id

            # set the specified account to be expired
            expiredUser = sess.query(User).filter(
                User.email == test_users['expired_lock_email']).one()
            today = parse(time.strftime("%c"))
            expiredUser.last_login_date = (today -
                                           timedelta(days=120)).strftime("%c")
            sess.add(expiredUser)

            # create users for status testing
            for u in status_test_users:
                user = User(email=u.email,
                            permissions=u.permissions,
                            user_status=sess.query(UserStatus).filter(
                                UserStatus.name == u.user_status).one())
                sess.add(user)

            # set up approved user
            user = sess.query(User).filter(
                User.email == test_users['approved_email']).one()
            user.username = "******"
            user.cgac_code = "000"
            user.salt, user.password_hash = getPasswordHash(
                user_password, Bcrypt())
            sess.add(user)
            cls.approved_user_id = user.user_id

            # set up admin user
            admin = sess.query(User).filter(
                User.email == test_users['admin_email']).one()
            admin.salt, admin.password_hash = getPasswordHash(
                admin_password, Bcrypt())
            admin.name = "Mr. Manager"
            admin.cgac_code = "SYS"
            sess.add(admin)

            # set up status changed user
            statusChangedUser = sess.query(User).filter(
                User.email == test_users['change_user_email']).one()
            statusChangedUser.name = "Test User"
            statusChangedUser.user_status = sess.query(UserStatus).filter(
                UserStatus.name == 'email_confirmed').one()
            sess.add(statusChangedUser)
            cls.status_change_user_id = statusChangedUser.user_id

            # set up deactivated user
            deactivated_user = sess.query(User).filter(
                User.email == test_users['inactive_email']).one()
            deactivated_user.last_login_date = time.strftime("%c")
            deactivated_user.is_active = False
            sess.add(deactivated_user)

            sess.commit()

        # get lookup dictionaries
        cls.jobStatusDict = lookups.JOB_STATUS_DICT
        cls.jobTypeDict = lookups.JOB_TYPE_DICT
        cls.fileTypeDict = lookups.FILE_TYPE_DICT
        cls.fileStatusDict = lookups.FILE_STATUS_DICT
        cls.ruleSeverityDict = lookups.RULE_SEVERITY_DICT
        cls.errorTypeDict = lookups.ERROR_TYPE_DICT
        cls.publishStatusDict = lookups.PUBLISH_STATUS_DICT
        cls.userStatusDict = lookups.USER_STATUS_DICT

        # set up info needed by the individual test classes
        cls.test_users = test_users
        cls.user_password = user_password
        cls.admin_password = admin_password
        cls.local = CONFIG_BROKER['local']
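A small sketch of how an individual test might lean on the attributes exposed by this setUpClass; BaseTestUser is an assumed placeholder for the class that owns it:

class SeededUserTests(BaseTestUser):  # BaseTestUser: assumed name for the class above

    def test_seeded_accounts_are_exposed(self):
        # setUpClass publishes the seeded e-mails, shared passwords, user ids
        # and lookup dictionaries as class attributes for tests to reuse.
        self.assertIn('approved_email', self.test_users)
        self.assertIsNotNone(self.approved_user_id)
        self.assertTrue(self.user_password)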
Example 8
def setupValidationDB():
    """Create validation tables from model metadata and do initial inserts."""
    createDatabase(CONFIG_DB['validator_db_name'])
    runMigrations('validation')
    insertCodes()
Example 9
def setupValidationDB(hardReset=False):
    """Create validation tables from model metadata and do initial inserts."""
    createDatabase(CONFIG_DB['validator_db_name'])
    validatorDb = ValidatorValidationInterface()
    # TODO: use Alembic for initial db setup
    if hardReset:
        validationModels.Base.metadata.drop_all(validatorDb.engine)
    validationModels.Base.metadata.create_all(validatorDb.engine)

    validatorDb.session.commit()
    validatorDb.session.close()

    # insert rule timing
    ruleTimingList = [(1,'file_validation','Run during pre-load validation of a file'),
                      (2,'prerequisite','Run only when referenced by another rule'),
                      (3,'cross-file','This rule is checked during cross-file validation')]
    for r in ruleTimingList:
        ruleTiming = RuleTiming(rule_timing_id = r[0], name = r[1], description = r[2])
        validatorDb.session.merge(ruleTiming)

    # insert file types
    fileTypeList = [(1, 'award', 'award file'),
        (2, 'award_financial', 'award_financial file'),
        (3, 'appropriations', 'appropriations file'),
        (4, 'program_activity','program activity and object class file')]
    for f in fileTypeList:
        fileType = FileType(file_id=f[0], name=f[1], description=f[2])
        validatorDb.session.merge(fileType)

    # insert rule types
    ruleTypeList = [(1, 'TYPE', 'checks type'),
        (2, 'EQUAL', 'equals operator '),
        (3, 'NOT EQUAL', 'not equals operator '),
        (4, 'LESS', 'less than operator '),
        (5, 'GREATER', 'greater than operator'),
        (6, 'LENGTH', 'string length'),
        (7, 'IN_SET', 'value must be in set'),
        (8, 'MIN LENGTH', 'length of data must be at least reference value'),
        (9, 'REQUIRED_CONDITIONAL', 'field is required if secondary rule passes'),
        (10, 'SUM', 'field is equal to the sum of other fields')
        ]
    for r in ruleTypeList:
        ruleType = RuleType(rule_type_id=r[0], name=r[1], description=r[2])
        validatorDb.session.merge(ruleType)

    # insert field types
    fieldTypeList = [(1, 'INT', 'integer type'),
        (2, 'DECIMAL', 'decimal type '),
        (3, 'BOOLEAN', 'yes/no'),
        (4, 'STRING', 'string type'),
        (5, 'LONG', 'long integer')]
    for f in fieldTypeList:
        fieldType = FieldType(field_type_id=f[0], name=f[1], description=f[2])
        validatorDb.session.merge(fieldType)

    # insert multi-field rule types
    mfrTypeList = [(1, 'CAR_MATCH', 'Matching a set of fields against a CAR file'),
                   (2, 'FIELD_MATCH', 'Match a set of fields against a different file'),
                   (3, 'RULE_IF', 'Apply first rule if second rule passes'),
                   (4, 'GREATER', 'Check if field is greater than specified value'),
                   (5, 'SUM_TO_VALUE', 'Sum a set of fields and compare to specified value')
                   ]
    for m in mfrTypeList:
        mfrt = MultiFieldRuleType(
            multi_field_rule_type_id = m[0], name=m[1], description=m[2])
        validatorDb.session.merge(mfrt)

    validatorDb.session.commit()
    validatorDb.session.close()
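Because every lookup row above is written with session.merge() keyed on an explicit primary key, re-running the function rewrites the same rows rather than duplicating them; a rough sketch of the two calling modes:

setupValidationDB()                # create database if missing, (re)seed lookup rows
setupValidationDB(hardReset=True)  # drop all validation tables first, then rebuild and reseed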
Example 10
def setupJobQueueDB():
    """Create the job queue database."""
    createDatabase(CONFIG_DB['job_queue_db_name'])
Example 11
def setupUserDB():
    """Create user tables from model metadata."""
    createDatabase(CONFIG_DB['user_db_name'])
    runMigrations('user_manager')
    insertCodes()
Example 12
def setupValidationDB():
    """Create validation tables from model metadata and do initial inserts."""
    createDatabase(CONFIG_DB['validator_db_name'])
    runMigrations('validation')
    insertCodes()
Example 13
def setupJobTrackerDB():
    """Create job tracker tables from model metadata."""
    createDatabase(CONFIG_DB['job_db_name'])
    runMigrations('job_tracker')
    insertCodes()
Example 14
def setupStagingDB():
    """Create the staging database."""
    createDatabase(CONFIG_DB['staging_db_name'])
Example 15
def setupStagingDB():
    """Create the staging database and run its migrations."""
    createDatabase(CONFIG_DB['staging_db_name'])
    runMigrations('staging')
Example 16
def setupValidationDB(hardReset=False):
    """Create validation tables from model metadata and do initial inserts."""
    createDatabase(CONFIG_DB['validator_db_name'])
    validatorDb = ValidatorValidationInterface()
    # TODO: use Alembic for initial db setup
    if hardReset:
        validationModels.Base.metadata.drop_all(validatorDb.engine)
    validationModels.Base.metadata.create_all(validatorDb.engine)

    validatorDb.session.commit()
    validatorDb.session.close()

    # insert rule timing
    ruleTimingList = [
        (1, 'file_validation', 'Run during pre-load validation of a file'),
        (2, 'prerequisite', 'Run only when referenced by another rule'),
        (3, 'cross-file', 'This rule is checked during cross-file validation')
    ]
    for r in ruleTimingList:
        ruleTiming = RuleTiming(rule_timing_id=r[0],
                                name=r[1],
                                description=r[2])
        validatorDb.session.merge(ruleTiming)

    # insert file types
    fileTypeList = [(1, 'award', 'award file'),
                    (2, 'award_financial', 'award_financial file'),
                    (3, 'appropriations', 'appropriations file'),
                    (4, 'program_activity',
                     'program activity and object class file')]
    for f in fileTypeList:
        fileType = FileType(file_id=f[0], name=f[1], description=f[2])
        validatorDb.session.merge(fileType)

    # insert rule types
    ruleTypeList = [(1, 'TYPE', 'checks type'),
                    (2, 'EQUAL', 'equals operator '),
                    (3, 'NOT EQUAL', 'not equals operator '),
                    (4, 'LESS', 'less than operator '),
                    (5, 'GREATER', 'greater than operator'),
                    (6, 'LENGTH', 'string length'),
                    (7, 'IN_SET', 'value must be in set'),
                    (8, 'MIN LENGTH',
                     'length of data must be at least reference value'),
                    (9, 'REQUIRED_CONDITIONAL',
                     'field is required if secondary rule passes'),
                    (10, 'SUM', 'field is equal to the sum of other fields')]
    for r in ruleTypeList:
        ruleType = RuleType(rule_type_id=r[0], name=r[1], description=r[2])
        validatorDb.session.merge(ruleType)

    # insert field types
    fieldTypeList = [(1, 'INT', 'integer type'),
                     (2, 'DECIMAL', 'decimal type '), (3, 'BOOLEAN', 'yes/no'),
                     (4, 'STRING', 'string type'), (5, 'LONG', 'long integer')]
    for f in fieldTypeList:
        fieldType = FieldType(field_type_id=f[0], name=f[1], description=f[2])
        validatorDb.session.merge(fieldType)

    # insert multi-field rule types
    mfrTypeList = [
        (1, 'CAR_MATCH', 'Matching a set of fields against a CAR file'),
        (2, 'FIELD_MATCH', 'Match a set of fields against a different file'),
        (3, 'RULE_IF', 'Apply first rule if second rule passes'),
        (4, 'GREATER', 'Check if field is greater than specified value'),
        (5, 'SUM_TO_VALUE',
         'Sum a set of fields and compare to specified value')
    ]
    for m in mfrTypeList:
        mfrt = MultiFieldRuleType(multi_field_rule_type_id=m[0],
                                  name=m[1],
                                  description=m[2])
        validatorDb.session.merge(mfrt)

    validatorDb.session.commit()
    validatorDb.session.close()
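A small sanity-check sketch, assuming ValidatorValidationInterface exposes the same SQLAlchemy session used above; the helper name is illustrative:

def check_validation_lookups():
    # Read back the seeded rule types to confirm the inserts landed,
    # mirroring the session access pattern used in setupValidationDB.
    validatorDb = ValidatorValidationInterface()
    try:
        names = [row.name for row in
                 validatorDb.session.query(RuleType).order_by(RuleType.rule_type_id)]
        assert names[0] == 'TYPE' and names[-1] == 'SUM'
        return names
    finally:
        validatorDb.session.close()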