Example #1
    def test_repair_a_rule_with_source_replica_expression(self):
        """ JUDGE EVALUATOR: Test the judge when a with two rules with source_replica_expression"""
        scope = 'mock'
        files = create_files(3, scope, self.rse4)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')

        # Add a first rule to the DS
        rule_id1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0]
        rule_id2 = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse3, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, source_replica_expression=self.rse1)[0]

        assert(RuleState.REPLICATING == get_rule(rule_id1)['state'])
        assert(RuleState.STUCK == get_rule(rule_id2)['state'])

        successful_transfer(scope=scope, name=files[0]['name'], rse_id=self.rse1_id, nowait=False)
        successful_transfer(scope=scope, name=files[1]['name'], rse_id=self.rse1_id, nowait=False)
        successful_transfer(scope=scope, name=files[2]['name'], rse_id=self.rse1_id, nowait=False)
        # Also make replicas AVAILABLE
        session = get_session()
        replica = session.query(models.RSEFileAssociation).filter_by(scope=scope, name=files[0]['name'], rse_id=self.rse1_id).one()
        replica.state = ReplicaState.AVAILABLE
        replica = session.query(models.RSEFileAssociation).filter_by(scope=scope, name=files[1]['name'], rse_id=self.rse1_id).one()
        replica.state = ReplicaState.AVAILABLE
        replica = session.query(models.RSEFileAssociation).filter_by(scope=scope, name=files[2]['name'], rse_id=self.rse1_id).one()
        replica.state = ReplicaState.AVAILABLE
        session.commit()

        rule_repairer(once=True)

        assert(RuleState.OK == get_rule(rule_id1)['state'])
        assert(RuleState.REPLICATING == get_rule(rule_id2)['state'])
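
A minimal sketch (not part of the original test) of how the three repeated replica-state updates above could be factored into a helper; it assumes only the names already used in the example (get_session, models.RSEFileAssociation, ReplicaState):

    def _mark_replicas_available(scope, names, rse_id):
        # Hypothetical helper: force the listed replicas to AVAILABLE directly
        # in the database, mirroring the inline session updates in the test above.
        session = get_session()
        for name in names:
            replica = session.query(models.RSEFileAssociation).filter_by(
                scope=scope, name=name, rse_id=rse_id).one()
            replica.state = ReplicaState.AVAILABLE
        session.commit()

    # Usage in the test body would then be:
    # _mark_replicas_available(scope, [f['name'] for f in files], self.rse1_id)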
Example #2
    def test_to_repair_a_rule_with_only_1_rse_whose_transfers_failed(self):
        """ JUDGE REPAIRER: Test to repair a rule with only 1 rse whose transfers failed (lock)"""

        rule_repairer(once=True)  # Clean out the repairer
        scope = 'mock'
        files = create_files(4, scope, self.rse4, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')

        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0]

        successful_transfer(scope=scope, name=files[0]['name'], rse_id=get_replica_locks(scope=files[0]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
        successful_transfer(scope=scope, name=files[1]['name'], rse_id=get_replica_locks(scope=files[1]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
        failed_transfer(scope=scope, name=files[2]['name'], rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
        failed_transfer(scope=scope, name=files[3]['name'], rse_id=get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
        cancel_request_did(scope=scope, name=files[2]['name'], dest_rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
        cancel_request_did(scope=scope, name=files[3]['name'], dest_rse_id=get_replica_locks(scope=files[3]['scope'], name=files[2]['name'])[0].rse_id)

        assert(rule_id == get_rule(rule_id)['id'].replace('-', '').lower())
        assert(RuleState.STUCK == get_rule(rule_id)['state'])
        rule_repairer(once=True)
        # Still assert STUCK because of delays:
        assert(RuleState.STUCK == get_rule(rule_id)['state'])
        assert(get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id == get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
Example #3
    def test_judge_evaluate_detach_datasetlock(self):
        """ JUDGE EVALUATOR: Test if the a datasetlock is detached correctly when removing a dataset from a container"""
        re_evaluator(once=True)

        scope = 'mock'
        files = create_files(3, scope, self.rse1, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')

        container = 'container_' + str(uuid())
        add_did(scope, container, DIDType.from_sym('CONTAINER'), 'jdoe')
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], 'jdoe')

        # Add a rule to the Container
        add_rule(dids=[{'scope': scope, 'name': container}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)

        # Check if the datasetlock is there
        locks = [ds_lock for ds_lock in get_dataset_locks(scope=scope, name=dataset)]
        assert(len(locks) > 0)

        detach_dids(scope, container, [{'scope': scope, 'name': dataset}])

        # Fake judge
        re_evaluator(once=True)

        locks = [ds_lock for ds_lock in get_dataset_locks(scope=scope, name=dataset)]
        assert(len(locks) == 0)
Example #4
    def test_account_counter_judge_evaluate_detach(self):
        """ JUDGE EVALUATOR: Test if the account counter is updated correctly when a file is removed from a DS"""
        re_evaluator(once=True)
        account_update(once=True)

        scope = 'mock'
        files = create_files(3, scope, self.rse1, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')

        # Add a first rule to the DS
        add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None)

        account_update(once=True)

        account_counter_before = get_counter(self.rse1_id, 'jdoe')

        detach_dids(scope, dataset, [files[0]])

        # Fake judge
        re_evaluator(once=True)
        account_update(once=True)

        account_counter_after = get_counter(self.rse1_id, 'jdoe')
        assert(account_counter_before['bytes'] - 100 == account_counter_after['bytes'])
        assert(account_counter_before['files'] - 1 == account_counter_after['files'])
Example #5
    def test_judge_add_files_to_dataset(self):
        """ JUDGE EVALUATOR: Test the judge when adding files to dataset"""
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')

        # Add a first rule to the DS
        add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)

        attach_dids(scope, dataset, files, 'jdoe')
        re_evaluator(once=True)

        files = create_files(3, scope, self.rse1)
        attach_dids(scope, dataset, files, 'jdoe')

        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
Example #6
    def test_to_repair_a_rule_with_only_1_rse_whose_site_is_blacklisted(self):
        """ JUDGE REPAIRER: Test to repair a rule with only 1 rse whose site is blacklisted"""

        rse = rse_name_generator()
        rse_id = add_rse(rse)
        update_rse(rse_id, {'availability_write': False})
        set_local_account_limit(self.jdoe, rse_id, -1)

        rule_repairer(once=True)  # Clean out the repairer
        scope = InternalScope('mock')
        files = create_files(4, scope, self.rse4_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        rule_id = add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                           account=self.jdoe,
                           copies=1,
                           rse_expression=rse,
                           grouping='DATASET',
                           weight=None,
                           lifetime=None,
                           locked=False,
                           subscription_id=None,
                           ignore_availability=True,
                           activity='DebugJudge')[0]

        assert (RuleState.STUCK == get_rule(rule_id)['state'])
        rule_repairer(once=True)

        # Still assert STUCK because of ignore_availability:
        assert (RuleState.STUCK == get_rule(rule_id)['state'])

        region = make_region().configure('dogpile.cache.memcached',
                                         expiration_time=3600,
                                         arguments={
                                             'url':
                                             config_get(
                                                 'cache', 'url', False,
                                                 '127.0.0.1:11211'),
                                             'distributed_lock':
                                             True
                                         })
        region.delete(sha256(rse.encode()).hexdigest())

        update_rse(rse_id, {'availability_write': True})
        rule_repairer(once=True)
        assert (RuleState.REPLICATING == get_rule(rule_id)['state'])
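
The availability flip followed by region.delete of the hashed RSE name is needed because the RSE-expression parser caches per-RSE data. A minimal sketch (an illustration, not part of the original test) of wrapping that step in a helper, in the spirit of the change_availability() helper used in Example #25; it reuses only update_rse, sha256 and the region object created above:

    def _set_availability_write(rse, rse_id, region, value):
        # Hypothetical helper: toggle availability_write and drop the cached
        # entry so the next expression parse sees the new value.
        update_rse(rse_id, {'availability_write': value})
        region.delete(sha256(rse.encode()).hexdigest())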
Example #7
    def test_judge_expire_rule(self):
        """ JUDGE CLEANER: Test the judge when deleting expired rules"""
        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                 account=self.jdoe,
                 copies=1,
                 rse_expression=self.rse1,
                 grouping='NONE',
                 weight='fakeweight',
                 lifetime=-3,
                 locked=False,
                 subscription_id=None)[0]
        add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                 account=self.jdoe,
                 copies=2,
                 rse_expression=self.T1,
                 grouping='NONE',
                 weight='fakeweight',
                 lifetime=None,
                 locked=False,
                 subscription_id=None)[0]
        add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                 account=self.jdoe,
                 copies=3,
                 rse_expression=self.T1,
                 grouping='NONE',
                 weight='fakeweight',
                 lifetime=None,
                 locked=False,
                 subscription_id=None)[0]

        rule_cleaner(once=True)

        for file in files:
            rse_locks = get_replica_locks(scope=file['scope'],
                                          name=file['name'])
            assert (len(rse_locks) == 5)
Example #8
    def test_judge_add_files_to_dataset_with_2_rules(self):
        """ JUDGE EVALUATOR: Test the judge when adding files to dataset with 2 rules"""
        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)

        # Add a first rule to the DS
        add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse5, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)
        add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.root, copies=1, rse_expression=self.rse5, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)

        attach_dids(scope, dataset, files, self.jdoe)
        re_evaluator(once=True)

        files = create_files(3, scope, self.rse1_id)
        attach_dids(scope, dataset, files, self.jdoe)

        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
Example #9
    def test_repair_a_rule_with_missing_locks(self):
        """ JUDGE EVALUATOR: Test the judge when a rule gets STUCK from re_evaluating and there are missing locks"""
        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse4_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)

        # Add a first rule to the DS
        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0]

        attach_dids(scope, dataset, files, self.jdoe)

        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)

        # Add more files to the DID
        files2 = create_files(3, scope, self.rse4_id)
        attach_dids(scope, dataset, files2, self.jdoe)

        # Mark the rule STUCK to fake that the re-evaluation failed
        session = get_session()
        rule = session.query(models.ReplicationRule).filter_by(id=rule_id).one()
        rule.state = RuleState.STUCK
        session.commit()

        rule_repairer(once=True)

        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
        for file in files2:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
            assert(len(set([lock.rse_id for lock in get_replica_locks(scope=files[0]['scope'], name=files[0]['name'])]).intersection(set([lock.rse_id for lock in get_replica_locks(scope=file['scope'], name=file['name'])]))) == 2)
        assert(12 == get_rule(rule_id)['locks_replicating_cnt'])
Example #10
    def test_repair_a_rule_with_missing_locks(self):
        """ JUDGE EVALUATOR: Test the judge when a rule gets STUCK from re_evaluating and there are missing locks"""
        scope = 'mock'
        files = create_files(3, scope, self.rse4)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')

        # Add a first rule to the DS
        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0]

        attach_dids(scope, dataset, files, 'jdoe')

        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)

        # Add more files to the DID
        files2 = create_files(3, scope, self.rse4)
        attach_dids(scope, dataset, files2, 'jdoe')

        # Mark the rule STUCK to fake that the re-evaluation failed
        session = get_session()
        rule = session.query(models.ReplicationRule).filter_by(id=rule_id).one()
        rule.state = RuleState.STUCK
        session.commit()

        rule_repairer(once=True)

        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
        for file in files2:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
            assert(len(set([lock.rse_id for lock in get_replica_locks(scope=files[0]['scope'], name=files[0]['name'])]).intersection(set([lock.rse_id for lock in get_replica_locks(scope=file['scope'], name=file['name'])]))) == 2)
        assert(12 == get_rule(rule_id)['locks_replicating_cnt'])
Example #11
    def test_judge_inject_delayed_rule(self):
        """ JUDGE INJECTOR: Test the judge when injecting a delayed rule"""
        scope = InternalScope('mock', **self.vo)
        files = create_files(1, scope, self.rse1_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)
        [file] = files

        # Add a delayed rule
        rule_id = add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                           account=self.jdoe,
                           copies=2,
                           rse_expression=self.T1,
                           grouping='DATASET',
                           weight=None,
                           lifetime=None,
                           locked=False,
                           subscription_id=None,
                           delay_injection=3600)[0]

        rule = get_rule(rule_id)
        assert rule['state'] == RuleState.INJECT
        assert rule['updated_at'] < rule['created_at']
        assert datetime.utcnow() + timedelta(seconds=3550) < rule[
            'created_at'] < datetime.utcnow() + timedelta(seconds=3650)

        # The time to create the rule has not yet arrived. The injector must skip this rule, no locks must be created
        rule_injector(once=True)
        assert get_rule(rule_id)['state'] == RuleState.INJECT
        assert not get_replica_locks(scope=file['scope'], name=file['name'])

        # simulate that time to inject the rule has arrived
        @transactional_session
        def __update_created_at(session=None):
            session.query(ReplicationRule).filter_by(
                id=rule_id).one().created_at = datetime.utcnow()

        __update_created_at()

        # The injector must create the locks now
        rule_injector(once=True)
        assert get_rule(rule_id)['state'] == RuleState.REPLICATING
        assert len(get_replica_locks(scope=file['scope'],
                                     name=file['name'])) == 2
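
A minimal sketch (an assumption for illustration, not part of the original test) generalizing the __update_created_at trick above into a reusable helper for making a delayed rule due; it reuses only transactional_session, ReplicationRule and datetime from the example:

    @transactional_session
    def _force_injection_due(rule_id, session=None):
        # Hypothetical helper: rewind created_at so the injector treats the
        # delayed rule as due on its next pass.
        session.query(ReplicationRule).filter_by(id=rule_id).one().created_at = datetime.utcnow()

    # _force_injection_due(rule_id); rule_injector(once=True) would then create the locks.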
Example #12
    def test_judge_deny_rule(self):
        """ JUDGE INJECTOR: Test the judge when asking approval for a rule and denying it"""
        scope = InternalScope('mock')
        files = create_files(3, scope, self.rse1_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        # Add a first rule to the DS
        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse4, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, ask_approval=True)[0]

        assert(get_rule(rule_id)['state'] == RuleState.WAITING_APPROVAL)

        deny_rule(rule_id=rule_id, approver=self.jdoe)

        assert_raises(RuleNotFound, get_rule, rule_id)
Example #13
    def test_judge_evaluate_detach_datasetlock(self):
        """ JUDGE EVALUATOR: Test if the a datasetlock is detached correctly when removing a dataset from a container"""
        re_evaluator(once=True)

        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        container = 'container_' + str(uuid())
        add_did(scope, container, DIDType.CONTAINER, self.jdoe)
        attach_dids(scope, container, [{
            'scope': scope,
            'name': dataset
        }], self.jdoe)

        # Add a rule to the Container
        add_rule(dids=[{
            'scope': scope,
            'name': container
        }],
                 account=self.jdoe,
                 copies=1,
                 rse_expression=self.rse1,
                 grouping='DATASET',
                 weight=None,
                 lifetime=None,
                 locked=False,
                 subscription_id=None)

        # Check if the datasetlock is there
        locks = [
            ds_lock for ds_lock in get_dataset_locks(scope=scope, name=dataset)
        ]
        assert (len(locks) > 0)

        detach_dids(scope, container, [{'scope': scope, 'name': dataset}])

        # Fake judge
        re_evaluator(once=True)

        locks = [
            ds_lock for ds_lock in get_dataset_locks(scope=scope, name=dataset)
        ]
        assert (len(locks) == 0)
Example #14
    def test_judge_inject_rule(self):
        """ JUDGE INJECTOR: Test the judge when injecting a rule"""
        scope = InternalScope('mock')
        files = create_files(3, scope, self.rse1_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        # Add a first rule to the DS
        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, asynchronous=True)[0]

        assert(get_rule(rule_id)['state'] == RuleState.INJECT)

        rule_injector(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
        assert(get_rule(rule_id)['state'] == RuleState.REPLICATING)
Example #15
    def test_judge_add_dataset_to_container(self):
        """ JUDGE EVALUATOR: Test the judge when adding dataset to container"""
        scope = InternalScope('mock')
        files = create_files(3, scope, self.rse1_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        parent_container = 'dataset_' + str(uuid())
        add_did(scope, parent_container, DIDType.from_sym('CONTAINER'),
                self.jdoe)
        # Add a first rule to the DS
        add_rule(dids=[{
            'scope': scope,
            'name': parent_container
        }],
                 account=self.jdoe,
                 copies=2,
                 rse_expression=self.T1,
                 grouping='DATASET',
                 weight=None,
                 lifetime=None,
                 locked=False,
                 subscription_id=None)
        attach_dids(scope, parent_container, [{
            'scope': scope,
            'name': dataset
        }], self.jdoe)
        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert (len(
                get_replica_locks(scope=file['scope'],
                                  name=file['name'])) == 2)

        # Check if the DatasetLocks are created properly
        dataset_locks = [
            lock for lock in get_dataset_locks(scope=scope, name=dataset)
        ]
        assert (len(dataset_locks) == 2)
Example #16
    def test_add_rule_with_r2d2_container_treating(self):
        """ JUDGE INJECTOR (CORE): Add a replication rule with an r2d2 container treatment"""
        scope = 'mock'
        container = 'asdf.r2d2_request.2016-04-01-15-00-00.ads.' + str(uuid())
        add_did(scope, container, DIDType.from_sym('CONTAINER'), 'jdoe')
        datasets = []
        for i in range(3):
            files = create_files(3, scope, self.rse1_id)
            dataset = 'dataset_' + str(uuid())
            datasets.append(dataset)
            add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
            attach_dids(scope, dataset, files, 'jdoe')
            attach_dids(scope, container, [{'scope': scope, 'name': dataset}], 'jdoe')
        rule_id = add_rule(dids=[{'scope': scope, 'name': container}],
                           account='jdoe',
                           copies=1,
                           rse_expression=self.rse1,
                           grouping='DATASET',
                           weight=None,
                           lifetime=900,
                           locked=False,
                           subscription_id=None,
                           ask_approval=True)[0]
        approve_rule(rule_id, approver='root')
        assert (get_rule(rule_id)['state'] == RuleState.INJECT)
        rule_injector(once=True)
        # Check if there is a rule for each dataset
        with assert_raises(RuleNotFound):
            get_rule(rule_id)
        for dataset in datasets:
            assert (len([r for r in list_rules({'scope': scope, 'name': dataset})]) > 0)
Example #17
    def test_account_counter_judge_evaluate_detach(self):
        """ JUDGE EVALUATOR: Test if the account counter is updated correctly when a file is removed from a DS"""
        re_evaluator(once=True)
        account_update(once=True)

        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        # Add a first rule to the DS
        add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                 account=self.jdoe,
                 copies=1,
                 rse_expression=self.rse1,
                 grouping='ALL',
                 weight=None,
                 lifetime=None,
                 locked=False,
                 subscription_id=None)

        account_update(once=True)

        account_counter_before = get_usage(self.rse1_id, self.jdoe)

        detach_dids(scope, dataset, [files[0]])

        # Fake judge
        re_evaluator(once=True)
        account_update(once=True)

        account_counter_after = get_usage(self.rse1_id, self.jdoe)
        assert (account_counter_before['bytes'] -
                100 == account_counter_after['bytes'])
        assert (account_counter_before['files'] -
                1 == account_counter_after['files'])
Example #18
    def test_to_repair_a_rule_with_DATASET_grouping_whose_transfer_failed(self):
        """ JUDGE REPAIRER: Test to repair a rule with 1 failed transfer (lock)"""

        rule_repairer(once=True)  # Clean out the repairer
        scope = InternalScope('mock', **self.vo)
        files = create_files(4, scope, self.rse4_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, activity='DebugJudge')[0]

        successful_transfer(scope=scope, name=files[0]['name'], rse_id=get_replica_locks(scope=files[0]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
        successful_transfer(scope=scope, name=files[1]['name'], rse_id=get_replica_locks(scope=files[1]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
        failed_transfer(scope=scope, name=files[2]['name'], rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
        failed_transfer(scope=scope, name=files[3]['name'], rse_id=get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)

        assert(rule_id == get_rule(rule_id)['id'].replace('-', '').lower())
        assert(RuleState.STUCK == get_rule(rule_id)['state'])
        rule_repairer(once=True)
        assert(RuleState.REPLICATING == get_rule(rule_id)['state'])
        assert(get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id == get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
        assert(get_replica_locks(scope=files[1]['scope'], name=files[1]['name'])[0].rse_id == get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
Example #19
    def test_account_counter_judge_evaluate_attach(self):
        """ JUDGE EVALUATOR: Test if the account counter is updated correctly when a file is added to a DS"""
        re_evaluator(once=True)
        account_update(once=True)

        scope = 'mock'
        files = create_files(3, scope, self.rse1, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')

        # Add a first rule to the DS
        add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None)

        account_counter_before = get_usage(self.rse1_id, 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')

        # Fake judge
        re_evaluator(once=True)
        account_update(once=True)

        account_counter_after = get_usage(self.rse1_id, 'jdoe')
        assert(account_counter_before['bytes'] + 3 * 100 == account_counter_after['bytes'])
        assert(account_counter_before['files'] + 3 == account_counter_after['files'])
Example #20
    def test_judge_ask_approval(self):
        """ JUDGE INJECTOR: Test the judge when asking approval for a rule"""
        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        # Add a first rule to the DS
        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse4, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, ask_approval=True)[0]

        assert(get_rule(rule_id)['state'] == RuleState.WAITING_APPROVAL)

        approve_rule(rule_id=rule_id, approver=self.jdoe)

        assert(get_rule(rule_id)['state'] == RuleState.INJECT)

        rule_injector(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 1)
        assert(get_rule(rule_id)['state'] == RuleState.REPLICATING)
Example #21
    def test_to_repair_a_rule_with_NONE_grouping_whose_transfer_failed_and_flipping_to_other_rse(self):
        """ JUDGE REPAIRER: Test to repair a rule with 1 failed transfer and flip to other rse(lock)"""

        rule_repairer(once=True)  # Clean out the repairer
        scope = 'mock'
        files = create_files(4, scope, self.rse4_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')

        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.T1, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=None)[0]

        successful_transfer(scope=scope, name=files[0]['name'], rse_id=get_replica_locks(scope=files[0]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
        successful_transfer(scope=scope, name=files[1]['name'], rse_id=get_replica_locks(scope=files[1]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
        failed_transfer(scope=scope, name=files[2]['name'], rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
        failed_transfer(scope=scope, name=files[3]['name'], rse_id=get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)

        old_rse_id = get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id

        assert(rule_id == get_rule(rule_id)['id'].replace('-', '').lower())
        assert(RuleState.STUCK == get_rule(rule_id)['state'])
        rule_repairer(once=True)
        assert(RuleState.REPLICATING == get_rule(rule_id)['state'])
        assert(get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id != old_rse_id)
Example #22
    def test_to_repair_a_rule_with_NONE_grouping_whose_transfer_failed(self):
        """ JUDGE REPAIRER: Test to repair a rule with 1 failed transfer (lock)"""

        rule_repairer(once=True)  # Clean out the repairer
        scope = InternalScope('mock')
        files = create_files(3, scope, self.rse4_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        rule_id = add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                           account=self.jdoe,
                           copies=1,
                           rse_expression=self.T1,
                           grouping='NONE',
                           weight=None,
                           lifetime=None,
                           locked=False,
                           subscription_id=None)[0]

        failed_rse_id = get_replica_locks(scope=files[2]['scope'],
                                          name=files[2]['name'])[0].rse_id
        assert (get_replica(
            scope=files[2]['scope'],
            name=files[2]['name'],
            rse_id=failed_rse_id)['state'] == ReplicaState.COPYING)
        assert (get_replica(scope=files[2]['scope'],
                            name=files[2]['name'],
                            rse_id=failed_rse_id)['lock_cnt'] == 1)

        successful_transfer(
            scope=scope,
            name=files[0]['name'],
            rse_id=get_replica_locks(scope=files[0]['scope'],
                                     name=files[2]['name'])[0].rse_id,
            nowait=False)
        successful_transfer(
            scope=scope,
            name=files[1]['name'],
            rse_id=get_replica_locks(scope=files[1]['scope'],
                                     name=files[2]['name'])[0].rse_id,
            nowait=False)
        failed_transfer(
            scope=scope,
            name=files[2]['name'],
            rse_id=get_replica_locks(scope=files[2]['scope'],
                                     name=files[2]['name'])[0].rse_id)

        assert (rule_id == get_rule(rule_id)['id'].replace('-', '').lower())
        assert (RuleState.STUCK == get_rule(rule_id)['state'])
        rule_repairer(once=True)
        assert (RuleState.REPLICATING == get_rule(rule_id)['state'])
        assert (get_replica(
            scope=files[2]['scope'],
            name=files[2]['name'],
            rse_id=failed_rse_id)['state'] == ReplicaState.UNAVAILABLE)
        assert (get_replica(scope=files[2]['scope'],
                            name=files[2]['name'],
                            rse_id=failed_rse_id)['lock_cnt'] == 0)
Example #23
    def test_to_repair_a_rule_with_only_1_rse_whose_transfers_failed(self):
        """ JUDGE REPAIRER: Test to repair a rule with only 1 rse whose transfers failed (lock)"""

        rule_repairer(once=True)  # Clean out the repairer
        scope = InternalScope('mock')
        files = create_files(4, scope, self.rse4_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        rule_id = add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                           account=self.jdoe,
                           copies=1,
                           rse_expression=self.rse1,
                           grouping='DATASET',
                           weight=None,
                           lifetime=None,
                           locked=False,
                           subscription_id=None)[0]

        successful_transfer(
            scope=scope,
            name=files[0]['name'],
            rse_id=get_replica_locks(scope=files[0]['scope'],
                                     name=files[2]['name'])[0].rse_id,
            nowait=False)
        successful_transfer(
            scope=scope,
            name=files[1]['name'],
            rse_id=get_replica_locks(scope=files[1]['scope'],
                                     name=files[2]['name'])[0].rse_id,
            nowait=False)
        failed_transfer(
            scope=scope,
            name=files[2]['name'],
            rse_id=get_replica_locks(scope=files[2]['scope'],
                                     name=files[2]['name'])[0].rse_id)
        failed_transfer(
            scope=scope,
            name=files[3]['name'],
            rse_id=get_replica_locks(scope=files[3]['scope'],
                                     name=files[3]['name'])[0].rse_id)
        cancel_request_did(
            scope=scope,
            name=files[2]['name'],
            dest_rse_id=get_replica_locks(scope=files[2]['scope'],
                                          name=files[2]['name'])[0].rse_id)
        cancel_request_did(
            scope=scope,
            name=files[3]['name'],
            dest_rse_id=get_replica_locks(scope=files[3]['scope'],
                                          name=files[2]['name'])[0].rse_id)

        assert (rule_id == get_rule(rule_id)['id'].replace('-', '').lower())
        assert (RuleState.STUCK == get_rule(rule_id)['state'])
        rule_repairer(once=True)

        # Still assert STUCK because of delays:
        assert (RuleState.STUCK == get_rule(rule_id)['state'])
        assert (get_replica_locks(
            scope=files[2]['scope'],
            name=files[2]['name'])[0].rse_id == get_replica_locks(
                scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
Example #24
    def test_repair_a_rule_with_source_replica_expression(self):
        """ JUDGE EVALUATOR: Test the judge when a with two rules with source_replica_expression"""
        scope = InternalScope('mock')
        files = create_files(3, scope, self.rse4_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        # Add a first rule to the DS
        rule_id1 = add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                            account=self.jdoe,
                            copies=1,
                            rse_expression=self.rse1,
                            grouping='DATASET',
                            weight=None,
                            lifetime=None,
                            locked=False,
                            subscription_id=None)[0]
        rule_id2 = add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                            account=self.jdoe,
                            copies=1,
                            rse_expression=self.rse3,
                            grouping='DATASET',
                            weight=None,
                            lifetime=None,
                            locked=False,
                            subscription_id=None,
                            source_replica_expression=self.rse1)[0]

        assert (RuleState.REPLICATING == get_rule(rule_id1)['state'])
        assert (RuleState.STUCK == get_rule(rule_id2)['state'])

        successful_transfer(scope=scope,
                            name=files[0]['name'],
                            rse_id=self.rse1_id,
                            nowait=False)
        successful_transfer(scope=scope,
                            name=files[1]['name'],
                            rse_id=self.rse1_id,
                            nowait=False)
        successful_transfer(scope=scope,
                            name=files[2]['name'],
                            rse_id=self.rse1_id,
                            nowait=False)
        # Also make replicas AVAILABLE
        session = get_session()
        replica = session.query(models.RSEFileAssociation).filter_by(
            scope=scope, name=files[0]['name'], rse_id=self.rse1_id).one()
        replica.state = ReplicaState.AVAILABLE
        replica = session.query(models.RSEFileAssociation).filter_by(
            scope=scope, name=files[1]['name'], rse_id=self.rse1_id).one()
        replica.state = ReplicaState.AVAILABLE
        replica = session.query(models.RSEFileAssociation).filter_by(
            scope=scope, name=files[2]['name'], rse_id=self.rse1_id).one()
        replica.state = ReplicaState.AVAILABLE
        session.commit()

        rule_repairer(once=True)

        assert (RuleState.OK == get_rule(rule_id1)['state'])
        assert (RuleState.REPLICATING == get_rule(rule_id2)['state'])
Example #25
    def test_to_repair_a_rule_with_only_1_rse_whose_site_is_blocklisted(self):
        """ JUDGE REPAIRER: Test to repair a rule with only 1 rse whose site is blocklisted"""

        rse = rse_name_generator()
        rse_id = add_rse(rse, **self.vo)
        set_local_account_limit(self.jdoe, rse_id, -1)
        rule_repairer(once=True)  # Clean out the repairer

        region = make_region().configure('dogpile.cache.memcached',
                                         expiration_time=900,
                                         arguments={
                                             'url':
                                             config_get(
                                                 'cache', 'url', False,
                                                 '127.0.0.1:11211'),
                                             'distributed_lock':
                                             True
                                         })

        def change_availability(new_value):
            update_rse(rse_id, {'availability_write': new_value})
            # clear cache
            region.delete(sha256(rse.encode()).hexdigest())

        for grouping, ignore_availability in itertools.product(
            ["NONE", "DATASET", "ALL"], [True, False]):
            scope = InternalScope('mock', **self.vo)
            files = create_files(1, scope, self.rse4_id, bytes_=100)
            dataset = 'dataset_' + str(uuid())
            add_did(scope, dataset, DIDType.DATASET, self.jdoe)
            attach_dids(scope, dataset, files, self.jdoe)

            if ignore_availability:
                change_availability(False)
                rule_id = add_rule(dids=[{
                    'scope': scope,
                    'name': dataset
                }],
                                   account=self.jdoe,
                                   copies=1,
                                   rse_expression=rse,
                                   grouping=grouping,
                                   weight=None,
                                   lifetime=None,
                                   locked=False,
                                   subscription_id=None,
                                   ignore_availability=ignore_availability,
                                   activity='DebugJudge')[0]
                assert (RuleState.STUCK == get_rule(rule_id)['state'])

                rule_repairer(once=True)
                assert (RuleState.REPLICATING == get_rule(rule_id)['state'])

                change_availability(True)
            else:
                rule_id = add_rule(dids=[{
                    'scope': scope,
                    'name': dataset
                }],
                                   account=self.jdoe,
                                   copies=1,
                                   rse_expression=rse,
                                   grouping=grouping,
                                   weight=None,
                                   lifetime=None,
                                   locked=False,
                                   subscription_id=None,
                                   ignore_availability=ignore_availability,
                                   activity='DebugJudge')[0]
                failed_transfer(scope=scope,
                                name=files[0]['name'],
                                rse_id=get_replica_locks(
                                    scope=files[0]['scope'],
                                    name=files[0]['name'])[0].rse_id)
                change_availability(False)
                assert (RuleState.STUCK == get_rule(rule_id)['state'])

                rule_repairer(once=True)
                assert (RuleState.STUCK == get_rule(rule_id)['state'])

                change_availability(True)
                rule_repairer(once=True)
                assert (RuleState.REPLICATING == get_rule(rule_id)['state'])
Example #26
    def test_judge_evaluate_detach(self):
        """ JUDGE EVALUATOR: Test if the detach is done correctly"""
        re_evaluator(once=True)

        scope = InternalScope('mock', **self.vo)
        container = 'container_' + str(uuid())
        add_did(scope, container, DIDType.CONTAINER, self.jdoe)

        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)
        attach_dids(scope, container, [{
            'scope': scope,
            'name': dataset
        }], self.jdoe)

        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)
        attach_dids(scope, container, [{
            'scope': scope,
            'name': dataset
        }], self.jdoe)

        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)
        attach_dids(scope, container, [{
            'scope': scope,
            'name': dataset
        }], self.jdoe)

        # Add a first rule to the Container
        rule_id = add_rule(dids=[{
            'scope': scope,
            'name': container
        }],
                           account=self.jdoe,
                           copies=1,
                           rse_expression=self.rse1,
                           grouping='ALL',
                           weight=None,
                           lifetime=None,
                           locked=False,
                           subscription_id=None)[0]

        # Fake judge
        re_evaluator(once=True)

        assert (9 == get_rule(rule_id)['locks_ok_cnt'])

        detach_dids(scope, dataset, [files[0]])

        # Fake judge
        re_evaluator(once=True)

        assert (8 == get_rule(rule_id)['locks_ok_cnt'])
Example #27
def test_bb8_full_workflow(vo, root_account, jdoe_account, rse_factory,
                           mock_scope, did_factory):
    """BB8: Test the rebalance rule method"""
    config_core.set(section='bb8', option='allowed_accounts', value='jdoe')
    tot_rses = 4
    rses = [rse_factory.make_posix_rse() for _ in range(tot_rses)]
    rse1, rse1_id = rses[0]
    rse2, rse2_id = rses[1]
    rse3, rse3_id = rses[2]
    rse4, rse4_id = rses[3]

    # Add Tags
    # RSE 1 and 2 match expression T1=true
    # RSE 3 and 4 match expression T2=true
    T1 = tag_generator()
    T2 = tag_generator()
    add_rse_attribute(rse1_id, T1, True)
    add_rse_attribute(rse2_id, T1, True)
    add_rse_attribute(rse3_id, T2, True)
    add_rse_attribute(rse4_id, T2, True)

    # Add fake weights
    add_rse_attribute(rse1_id, "fakeweight", 10)
    add_rse_attribute(rse2_id, "fakeweight", 0)
    add_rse_attribute(rse3_id, "fakeweight", 0)
    add_rse_attribute(rse4_id, "fakeweight", 0)
    add_rse_attribute(rse1_id, "freespace", 1)
    add_rse_attribute(rse2_id, "freespace", 1)
    add_rse_attribute(rse3_id, "freespace", 1)
    add_rse_attribute(rse4_id, "freespace", 1)

    # Add quota
    set_local_account_limit(jdoe_account, rse1_id, -1)
    set_local_account_limit(jdoe_account, rse2_id, -1)
    set_local_account_limit(jdoe_account, rse3_id, -1)
    set_local_account_limit(jdoe_account, rse4_id, -1)

    set_local_account_limit(root_account, rse1_id, -1)
    set_local_account_limit(root_account, rse2_id, -1)
    set_local_account_limit(root_account, rse3_id, -1)
    set_local_account_limit(root_account, rse4_id, -1)

    # Invalidate the cache because the result of parse_expression is cached
    REGION.invalidate()

    tot_datasets = 4
    # Create a list of datasets
    datasets = [did_factory.make_dataset() for _ in range(tot_datasets)]
    dsn = [dataset['name'] for dataset in datasets]

    rules = list()

    base_unit = 100000000000
    nb_files1 = 7
    nb_files2 = 5
    nb_files3 = 3
    nb_files4 = 2
    file_size = 1 * base_unit
    rule_to_rebalance = None

    # Add one secondary file
    files = create_files(1, mock_scope, rse1_id, bytes_=1)
    add_rule(dids=[{
        'scope': mock_scope,
        'name': files[0]['name']
    }],
             account=jdoe_account,
             copies=1,
             rse_expression=rse1,
             grouping='DATASET',
             weight=None,
             lifetime=-86400,
             locked=False,
             subscription_id=None)[0]
    for cnt in range(3, tot_rses):
        add_replicas(rses[cnt][1], files, jdoe_account)
        add_rule(dids=[{
            'scope': mock_scope,
            'name': files[0]['name']
        }],
                 account=jdoe_account,
                 copies=1,
                 rse_expression=rses[cnt][0],
                 grouping='DATASET',
                 weight=None,
                 lifetime=-86400,
                 locked=False,
                 subscription_id=None)[0]
    rule_cleaner(once=True)

    # Create dataset 1 of 700 GB and create a rule on RSE 1 and RSE 3
    files = create_files(nb_files1, mock_scope, rse1_id, bytes_=file_size)
    attach_dids(mock_scope, dsn[0], files, jdoe_account)

    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[0]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rules.append(rule_id)

    add_replicas(rse3_id, files, jdoe_account)
    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[0]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse3,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rules.append(rule_id)

    # Create dataset 2 of 500 GB and create a rule on RSE 1 and RSE 2
    files = create_files(nb_files2, mock_scope, rse1_id, bytes_=file_size)
    attach_dids(mock_scope, dsn[1], files, jdoe_account)

    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[1]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rules.append(rule_id)

    add_replicas(rse2_id, files, jdoe_account)
    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[1]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse2,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rules.append(rule_id)

    # Create dataset 3 of 300 GB and create a rule on RSE 1. The copy on RSE 3 is secondary
    files = create_files(nb_files3, mock_scope, rse1_id, bytes_=file_size)
    attach_dids(mock_scope, dsn[2], files, jdoe_account)

    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[2]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rule_to_rebalance = rule_id
    rules.append(rule_id)

    add_replicas(rse3_id, files, jdoe_account)
    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[2]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse3,
                       grouping='DATASET',
                       weight=None,
                       lifetime=-86400,
                       locked=False,
                       subscription_id=None)[0]
    rule_cleaner(once=True)
    try:
        rule = get_rule(rule_id)
    except:
        pytest.raises(RuleNotFound, get_rule, rule_id)

    # Create dataset 4 of 200 GB and create a rule on RSE 3. The copy on RSE 2 is secondary
    files = create_files(nb_files4, mock_scope, rse3_id, bytes_=file_size)
    attach_dids(mock_scope, dsn[3], files, jdoe_account)

    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[3]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse3,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rules.append(rule_id)

    add_replicas(rse2_id, files, jdoe_account)
    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[3]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse2,
                       grouping='DATASET',
                       weight=None,
                       lifetime=-86400,
                       locked=False,
                       subscription_id=None)[0]
    rule_cleaner(once=True)
    try:
        rule = get_rule(rule_id)
    except:
        pytest.raises(RuleNotFound, get_rule, rule_id)

    for dataset in dsn:
        set_status(mock_scope, dataset, open=False)

    for rse in rses:
        fill_rse_expired(rse[1])
        set_rse_usage(rse_id=rse[1],
                      source='min_free_space',
                      used=2 * base_unit,
                      free=2 * base_unit,
                      session=None)
        set_rse_usage(rse_id=rse[1],
                      source='storage',
                      used=15 * base_unit,
                      free=2 * base_unit,
                      session=None)
    set_rse_usage(rse_id=rse2_id,
                  source='min_free_space',
                  used=1 * base_unit,
                  free=1 * base_unit,
                  session=None)
    set_rse_usage(rse_id=rse2_id,
                  source='storage',
                  used=6 * base_unit,
                  free=5 * base_unit,
                  session=None)

    run_abacus(once=True, threads=1, fill_history_table=False, sleep_time=10)
    # Summary :
    # RSE 1 : 1500 GB primary + 1 B secondary
    tot_space = [
        src for src in get_rse_usage(rse1_id) if src['source'] == 'rucio'
    ][0]
    expired = [
        src for src in get_rse_usage(rse1_id) if src['source'] == 'expired'
    ][0]
    assert tot_space['used'] == (nb_files1 + nb_files2 +
                                 nb_files3) * file_size + 1
    assert expired['used'] == 1
    # RSE 2 : 500 GB primary + 200 GB secondary
    tot_space = [
        src for src in get_rse_usage(rse2_id) if src['source'] == 'rucio'
    ][0]
    expired = [
        src for src in get_rse_usage(rse2_id) if src['source'] == 'expired'
    ][0]
    assert tot_space['used'] == (nb_files2 + nb_files4) * file_size
    assert expired['used'] == nb_files4 * file_size
    # Total primary on T1=true : 2000 GB
    # Total secondary on T1=true : 200 GB
    # Ratio secondary / primary = 10  %
    # Ratio on RSE 1 : 0 %
    # Ratio on RSE 2 : 40 %

    # Now run BB8

    re_evaluator(once=True, sleep_time=30, did_limit=100)
    bb8_run(once=True,
            rse_expression='%s=true' % str(T1),
            move_subscriptions=False,
            use_dump=False,
            sleep_time=300,
            threads=1,
            dry_run=False)

    for rule_id in rules:
        rule = get_rule(rule_id)
        if rule_id != rule_to_rebalance:
            assert (rule['child_rule_id'] is None)
        else:
            assert (rule['child_rule_id'] is not None)
            assert (
                rule['expires_at'] <= datetime.utcnow() + timedelta(seconds=1)
            )  # timedelta needed to prevent failure due to rounding effects
            child_rule_id = rule['child_rule_id']
            child_rule = get_rule(child_rule_id)
            assert (child_rule['rse_expression'] == rse2)
            # For teardown, delete child rule
            update_rule(child_rule_id, {'lifetime': -86400})
    rule_cleaner(once=True)

    for dataset in dsn:
        set_metadata(mock_scope, dataset, 'lifetime', -86400)
    undertaker.run(once=True)
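
A short arithmetic check (illustrative only) of the summary comments above, using the file counts and the 100 GB base_unit defined in the test:

    base_unit = 100000000000                  # 100 GB, as in the test
    primary_rse1 = (7 + 5 + 3) * base_unit    # datasets 1-3 on RSE 1 -> 1500 GB
    primary_rse2 = 5 * base_unit              # dataset 2 replica on RSE 2 -> 500 GB
    secondary_rse2 = 2 * base_unit            # expired dataset 4 copy on RSE 2 -> 200 GB
    ratio_t1 = secondary_rse2 / (primary_rse1 + primary_rse2)   # 0.10 -> 10 %
    ratio_rse2 = secondary_rse2 / primary_rse2                  # 0.40 -> 40 %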
Example #28
def test_bb8_rebalance_rule(vo, root_account, jdoe_account, rse_factory,
                            mock_scope, did_factory):
    """BB8: Test the rebalance rule method"""
    rse1, rse1_id = rse_factory.make_posix_rse()
    rse2, rse2_id = rse_factory.make_posix_rse()

    # Add Tags
    T1 = tag_generator()
    T2 = tag_generator()
    add_rse_attribute(rse1_id, T1, True)
    add_rse_attribute(rse2_id, T2, True)

    # Add fake weights
    add_rse_attribute(rse1_id, "fakeweight", 10)
    add_rse_attribute(rse2_id, "fakeweight", 0)

    # Add quota
    set_local_account_limit(jdoe_account, rse1_id, -1)
    set_local_account_limit(jdoe_account, rse2_id, -1)

    set_local_account_limit(root_account, rse1_id, -1)
    set_local_account_limit(root_account, rse2_id, -1)

    files = create_files(3, mock_scope, rse1_id)
    dataset = did_factory.make_dataset()
    attach_dids(mock_scope, dataset['name'], files, jdoe_account)
    set_status(mock_scope, dataset['name'], open=False)

    # Invalidate the cache because the result of parse_expression is cached
    REGION.invalidate()

    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dataset['name']
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse1,
                       grouping='NONE',
                       weight='fakeweight',
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rule = {}
    try:
        rule = get_rule(rule_id)
    except:
        pytest.raises(RuleNotFound, get_rule, rule_id)
    child_rule = rebalance_rule(rule, 'Rebalance', rse2, priority=3)

    rule_cleaner(once=True)

    assert (get_rule(rule_id)['expires_at'] <= datetime.utcnow())
    assert (get_rule(rule_id)['child_rule_id'] == child_rule)

    rule_cleaner(once=True)

    assert (get_rule(rule_id)['expires_at'] <= datetime.utcnow())

    successful_transfer(scope=mock_scope,
                        name=files[0]['name'],
                        rse_id=rse2_id,
                        nowait=False)
    successful_transfer(scope=mock_scope,
                        name=files[1]['name'],
                        rse_id=rse2_id,
                        nowait=False)
    with pytest.raises(UnsupportedOperation):
        delete_rule(rule_id)
    successful_transfer(scope=mock_scope,
                        name=files[2]['name'],
                        rse_id=rse2_id,
                        nowait=False)

    rule_cleaner(once=True)
    assert (get_rule(child_rule)['state'] == RuleState.OK)
    set_metadata(mock_scope, dataset['name'], 'lifetime', -86400)
    undertaker.run(once=True)