def test_to_repair_a_rule_with_NONE_grouping_whose_transfer_failed(self):
    """ JUDGE REPAIRER: Test to repair a rule with 1 failed transfer (lock)"""
    rule_repairer(once=True)  # Clean out the repairer

    # One dataset of 3 files, replicated with NONE grouping to a single copy.
    scope = InternalScope('mock', **self.vo)
    files = create_files(3, scope, self.rse4_id, bytes=100)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.DATASET, self.jdoe)
    attach_dids(scope, dataset, files, self.jdoe)

    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                       account=self.jdoe,
                       copies=1,
                       rse_expression=self.T1,
                       grouping='NONE',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]

    # The destination replica of the third file starts out COPYING with one lock.
    failed_rse_id = get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id
    assert(get_replica(scope=files[2]['scope'], name=files[2]['name'], rse_id=failed_rse_id)['state'] == ReplicaState.COPYING)
    assert(get_replica(scope=files[2]['scope'], name=files[2]['name'], rse_id=failed_rse_id)['lock_cnt'] == 1)

    # Two transfers succeed, one fails.
    # NOTE(review): the lock lookups below pass name=files[2]['name'] while
    # transferring files[0]/files[1] — presumably all locks land on the same
    # RSE here; confirm against upstream before changing.
    successful_transfer(scope=scope, name=files[0]['name'], rse_id=get_replica_locks(scope=files[0]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
    successful_transfer(scope=scope, name=files[1]['name'], rse_id=get_replica_locks(scope=files[1]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
    failed_transfer(scope=scope, name=files[2]['name'], rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)

    assert(rule_id == get_rule(rule_id)['id'].replace('-', '').lower())
    assert(RuleState.STUCK == get_rule(rule_id)['state'])

    # The repairer must move the rule back to REPLICATING and mark the
    # failed replica UNAVAILABLE with its lock released.
    rule_repairer(once=True)
    assert(RuleState.REPLICATING == get_rule(rule_id)['state'])
    assert(get_replica(scope=files[2]['scope'], name=files[2]['name'], rse_id=failed_rse_id)['state'] == ReplicaState.UNAVAILABLE)
    assert(get_replica(scope=files[2]['scope'], name=files[2]['name'], rse_id=failed_rse_id)['lock_cnt'] == 0)
def test_add_rule_container_dataset_with_weights(self):
    """ REPLICATION RULE (CORE): Add a replication rule on a container, DATASET Grouping, WEIGHTS"""
    scope = 'mock'
    container = 'container_' + str(uuid())
    add_did(scope, container, DIDType.from_sym('CONTAINER'), 'jdoe')

    # Build three datasets of three files each and attach them to the container.
    all_files = []
    dataset_files = []
    for i in xrange(3):
        files = create_files(3, scope, self.rse1)
        all_files.extend(files)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], 'jdoe')
        dataset_files.append({'scope': scope, 'name': dataset, 'files': files})

    add_rule(dids=[{'scope': scope, 'name': container}],
             account='jdoe',
             copies=2,
             rse_expression=self.T1,
             grouping='DATASET',
             weight='fakeweight',
             lifetime=None,
             locked=False,
             subscription_id=None)

    # With DATASET grouping every file of a dataset must be locked on the
    # same pair of T1 RSEs, and the weight must pull in rse1.
    t1 = set([self.rse1_id, self.rse3_id, self.rse5_id])
    for dataset in dataset_files:
        first_locks = None
        for file in dataset['files']:
            if first_locks is None:
                first_locks = set([lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])])
            rse_locks = set([lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])])
            assert(len(t1.intersection(rse_locks)) == 2)
            assert(len(first_locks.intersection(rse_locks)) == 2)
            assert_in(self.rse1_id, rse_locks)
def test_judge_add_files_to_dataset_rule_on_container(self):
    """ JUDGE EVALUATOR: Test the judge when attaching file to dataset with rule on two levels of containers"""
    scope = InternalScope('mock', **self.vo)

    # dataset <- parent_container <- parent_parent_container
    files = create_files(3, scope, self.rse1_id)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
    attach_dids(scope, dataset, files, self.jdoe)

    parent_container = 'dataset_' + str(uuid())
    add_did(scope, parent_container, DIDType.from_sym('CONTAINER'), self.jdoe)
    attach_dids(scope, parent_container, [{'scope': scope, 'name': dataset}], self.jdoe)

    parent_parent_container = 'dataset_' + str(uuid())
    add_did(scope, parent_parent_container, DIDType.from_sym('CONTAINER'), self.jdoe)
    attach_dids(scope, parent_parent_container, [{'scope': scope, 'name': parent_container}], self.jdoe)

    # Rule on the top-level container.
    add_rule(dids=[{'scope': scope, 'name': parent_parent_container}],
             account=self.jdoe,
             copies=2,
             rse_expression=self.T1,
             grouping='DATASET',
             weight=None,
             lifetime=None,
             locked=False,
             subscription_id=None)

    # Fake judge
    re_evaluator(once=True)

    # Check if the Locks are created properly
    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)

    # Attach more files two container levels below the rule; the evaluator
    # must still create locks for them.
    more_files = create_files(3, scope, self.rse1_id)
    attach_dids(scope, dataset, more_files, self.jdoe)
    re_evaluator(once=True)
    for file in more_files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
def test_judge_inject_delayed_rule(self):
    """ JUDGE INJECTOR: Test the judge when injecting a delayed rule"""
    scope = InternalScope('mock', **self.vo)
    files = create_files(1, scope, self.rse1_id)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.DATASET, self.jdoe)
    attach_dids(scope, dataset, files, self.jdoe)
    [file] = files

    # Add a rule whose injection is delayed by one hour.
    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                       account=self.jdoe,
                       copies=2,
                       rse_expression=self.T1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None,
                       delay_injection=3600)[0]

    # The created_at of a delayed rule is set in the future.
    rule = get_rule(rule_id)
    assert rule['state'] == RuleState.INJECT
    assert rule['updated_at'] < rule['created_at']
    assert datetime.utcnow() + timedelta(seconds=3550) < rule['created_at'] < datetime.utcnow() + timedelta(seconds=3650)

    # The time to create the rule has not yet arrived. The injector must skip this rule, no locks must be created
    rule_injector(once=True)
    assert get_rule(rule_id)['state'] == RuleState.INJECT
    assert not get_replica_locks(scope=file['scope'], name=file['name'])

    # simulate that time to inject the rule has arrived
    @transactional_session
    def __update_created_at(session=None):
        session.query(ReplicationRule).filter_by(id=rule_id).one().created_at = datetime.utcnow()
    __update_created_at()

    # The injector must create the locks now
    rule_injector(once=True)
    assert get_rule(rule_id)['state'] == RuleState.REPLICATING
    assert len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2
def test_to_repair_a_rule_with_DATASET_grouping_whose_transfer_failed(self):
    """ JUDGE REPAIRER: Test to repair a rule with 1 failed transfer (lock)"""
    rule_repairer(once=True)  # Clean out the repairer

    scope = InternalScope('mock')
    files = create_files(4, scope, self.rse4_id, bytes=100)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
    attach_dids(scope, dataset, files, self.jdoe)

    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                       account=self.jdoe,
                       copies=1,
                       rse_expression=self.T1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None,
                       activity='DebugJudge')[0]

    # Two transfers succeed, two fail.
    successful_transfer(scope=scope, name=files[0]['name'], rse_id=get_replica_locks(scope=files[0]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
    successful_transfer(scope=scope, name=files[1]['name'], rse_id=get_replica_locks(scope=files[1]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
    failed_transfer(scope=scope, name=files[2]['name'], rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
    failed_transfer(scope=scope, name=files[3]['name'], rse_id=get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)

    assert(rule_id == get_rule(rule_id)['id'].replace('-', '').lower())
    assert(RuleState.STUCK == get_rule(rule_id)['state'])

    rule_repairer(once=True)
    assert(RuleState.REPLICATING == get_rule(rule_id)['state'])

    # DATASET grouping: the repaired locks must all end up on the same RSE.
    assert(get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id == get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
    assert(get_replica_locks(scope=files[1]['scope'], name=files[1]['name'])[0].rse_id == get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
def test_judge_add_files_to_dataset(self):
    """ JUDGE EVALUATOR: Test the judge when adding files to dataset"""
    scope = InternalScope('mock', **self.vo)
    files = create_files(3, scope, self.rse1_id)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.DATASET, self.jdoe)

    # Rule is added before any files are attached.
    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account=self.jdoe,
             copies=2,
             rse_expression=self.T1,
             grouping='DATASET',
             weight=None,
             lifetime=None,
             locked=False,
             subscription_id=None)

    attach_dids(scope, dataset, files, self.jdoe)
    re_evaluator(once=True)

    # Attach a second batch and re-run the fake judge.
    files = create_files(3, scope, self.rse1_id)
    attach_dids(scope, dataset, files, self.jdoe)
    re_evaluator(once=True)

    # Check if the Locks are created properly
    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
def test_judge_inject_rule(self):
    """ JUDGE INJECTOR: Test the judge when injecting a rule"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1_id)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    # An asynchronous rule starts in INJECT state.
    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                       account='jdoe',
                       copies=2,
                       rse_expression=self.T1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None,
                       asynchronous=True)[0]
    assert(get_rule(rule_id)['state'] == RuleState.INJECT)

    rule_injector(once=True)

    # Check if the Locks are created properly
    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
    assert(get_rule(rule_id)['state'] == RuleState.REPLICATING)
def test_to_repair_a_rule_with_only_1_rse_whose_transfers_failed(self):
    """ JUDGE REPAIRER: Test to repair a rule with only 1 rse whose transfers failed (lock)"""
    rule_repairer(once=True)  # Clean out the repairer

    scope = InternalScope('mock', **self.vo)
    files = create_files(4, scope, self.rse4_id, bytes=100)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.DATASET, self.jdoe)
    attach_dids(scope, dataset, files, self.jdoe)

    # Single-copy rule pinned to one RSE: failed locks cannot move elsewhere.
    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                       account=self.jdoe,
                       copies=1,
                       rse_expression=self.rse1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]

    successful_transfer(scope=scope, name=files[0]['name'], rse_id=get_replica_locks(scope=files[0]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
    successful_transfer(scope=scope, name=files[1]['name'], rse_id=get_replica_locks(scope=files[1]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
    failed_transfer(scope=scope, name=files[2]['name'], rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
    failed_transfer(scope=scope, name=files[3]['name'], rse_id=get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
    cancel_request_did(scope=scope, name=files[2]['name'], dest_rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
    cancel_request_did(scope=scope, name=files[3]['name'], dest_rse_id=get_replica_locks(scope=files[3]['scope'], name=files[2]['name'])[0].rse_id)

    assert(rule_id == get_rule(rule_id)['id'].replace('-', '').lower())
    assert(RuleState.STUCK == get_rule(rule_id)['state'])

    rule_repairer(once=True)

    # Still assert STUCK because of delays:
    assert(RuleState.STUCK == get_rule(rule_id)['state'])
    assert(get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id == get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
def test_to_repair_a_rule_with_only_1_rse_whose_transfers_failed(self):
    """ JUDGE REPAIRER: Test to repair a rule with only 1 rse whose transfers failed (lock)"""
    rule_repairer(once=True)  # Clean out the repairer

    scope = 'mock'
    files = create_files(4, scope, self.rse4, bytes=100)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    # Single-copy rule pinned to one RSE: failed locks cannot move elsewhere.
    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                       account='jdoe',
                       copies=1,
                       rse_expression=self.rse1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]

    successful_transfer(scope=scope, name=files[0]['name'], rse_id=get_replica_locks(scope=files[0]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
    successful_transfer(scope=scope, name=files[1]['name'], rse_id=get_replica_locks(scope=files[1]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
    failed_transfer(scope=scope, name=files[2]['name'], rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
    failed_transfer(scope=scope, name=files[3]['name'], rse_id=get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
    cancel_request_did(scope=scope, name=files[2]['name'], dest_rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
    cancel_request_did(scope=scope, name=files[3]['name'], dest_rse_id=get_replica_locks(scope=files[3]['scope'], name=files[2]['name'])[0].rse_id)

    assert(rule_id == get_rule(rule_id)['id'].replace('-', '').lower())
    assert(RuleState.STUCK == get_rule(rule_id)['state'])

    rule_repairer(once=True)

    # Still assert STUCK because of delays:
    assert(RuleState.STUCK == get_rule(rule_id)['state'])
    assert(get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id == get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
def test_add_rule_file_none(self):
    """ REPLICATION RULE (CORE): Add a replication rule on a group of files, NONE Grouping"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1)

    add_rule(dids=files,
             account='jdoe',
             copies=2,
             rse_expression=self.T1,
             grouping='NONE',
             weight=None,
             lifetime=None,
             locked=False,
             subscription_id=None)

    # Check if the Locks are created properly.
    # NOTE: self.rse1_id appears twice in the literal; as a set element the
    # duplicate is harmless.
    t1 = set([self.rse1_id, self.rse1_id, self.rse3_id, self.rse5_id])
    for file in files:
        rse_locks = set([lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])])
        assert(len(t1.intersection(rse_locks)) > 0)
        assert_not_in(self.rse4_id, rse_locks)
def test_add_rules_datasets_none(self):
    """ REPLICATION RULE (CORE): Add replication rules to multiple datasets, NONE Grouping"""
    scope = 'mock'

    files1 = create_files(3, scope, self.rse4)
    dataset1 = 'dataset_' + str(uuid())
    add_did(scope, dataset1, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset1, files1, 'jdoe')

    files2 = create_files(3, scope, self.rse4)
    dataset2 = 'dataset_' + str(uuid())
    add_did(scope, dataset2, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset2, files2, 'jdoe')

    # Add the rules to both DS
    add_rules(dids=[{'scope': scope, 'name': dataset1}, {'scope': scope, 'name': dataset2}],
              rules=[{'account': 'jdoe',
                      'copies': 1,
                      'rse_expression': self.T1,
                      'grouping': 'NONE',
                      'weight': None,
                      'lifetime': None,
                      'locked': False,
                      'subscription_id': None},
                     {'account': 'root',
                      'copies': 1,
                      'rse_expression': self.T1,
                      'grouping': 'NONE',
                      'weight': 'fakeweight',
                      'lifetime': None,
                      'locked': False,
                      'subscription_id': None}])

    # Both rules must have produced locks on the same RSE per file.
    for file in files1:
        rse_locks = [lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])]
        assert(rse_locks[0] == rse_locks[1])
    for file in files2:
        rse_locks = [lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])]
        assert(rse_locks[0] == rse_locks[1])
def test_judge_expire_rule(self):
    """ JUDGE CLEANER: Test the judge when deleting expired rules"""
    scope = InternalScope('mock', **self.vo)
    files = create_files(3, scope, self.rse1_id)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.DATASET, self.jdoe)
    attach_dids(scope, dataset, files, self.jdoe)

    # First rule is already expired (negative lifetime); the other two live on.
    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account=self.jdoe,
             copies=1,
             rse_expression=self.rse1,
             grouping='NONE',
             weight='fakeweight',
             lifetime=-3,
             locked=False,
             subscription_id=None)[0]
    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account=self.jdoe,
             copies=2,
             rse_expression=self.T1,
             grouping='NONE',
             weight='fakeweight',
             lifetime=None,
             locked=False,
             subscription_id=None)[0]
    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account=self.jdoe,
             copies=3,
             rse_expression=self.T1,
             grouping='NONE',
             weight='fakeweight',
             lifetime=None,
             locked=False,
             subscription_id=None)[0]

    rule_cleaner(once=True)

    # 2 + 3 locks from the surviving rules remain per file.
    for file in files:
        rse_locks = get_replica_locks(scope=file['scope'], name=file['name'])
        assert(len(rse_locks) == 5)
def test_delete_rule(self):
    """ REPLICATION RULE (CORE): Test to delete a previously created rule"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                       account='jdoe',
                       copies=2,
                       rse_expression=self.T1,
                       grouping='DATASET',
                       weight='fakeweight',
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]

    delete_rule(rule_id)

    # All locks must be gone, and a second delete must raise.
    for file in files:
        rse_locks = get_replica_locks(scope=file['scope'], name=file['name'])
        assert(len(rse_locks) == 0)
    assert_raises(RuleNotFound, delete_rule, uuid())
def test_repair_a_rule_with_missing_locks(self):
    """ JUDGE EVALUATOR: Test the judge when a rule gets STUCK from re_evaluating and there are missing locks"""
    scope = 'mock'
    files = create_files(3, scope, self.rse4)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')

    # Add a first rule to the DS
    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                       account='jdoe',
                       copies=2,
                       rse_expression=self.T1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]

    attach_dids(scope, dataset, files, 'jdoe')

    # Fake judge
    re_evaluator(once=True)

    # Check if the Locks are created properly
    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)

    # Add more files to the DID
    files2 = create_files(3, scope, self.rse4)
    attach_dids(scope, dataset, files2, 'jdoe')

    # Mark the rule STUCK to fake that the re-evaluation failed
    session = get_session()
    rule = session.query(models.ReplicationRule).filter_by(id=rule_id).one()
    rule.state = RuleState.STUCK
    session.commit()

    rule_repairer(once=True)

    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
    for file in files2:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
    # 'file' here is the last element of files2 (loop variable leaks out of
    # the for-loop by design); its locks must share both RSEs with files[0].
    assert(len(set([lock.rse_id for lock in get_replica_locks(scope=files[0]['scope'], name=files[0]['name'])]).intersection(set([lock.rse_id for lock in get_replica_locks(scope=file['scope'], name=file['name'])]))) == 2)
    assert(12 == get_rule(rule_id)['locks_replicating_cnt'])
def test_repair_a_rule_with_missing_locks(self):
    """ JUDGE EVALUATOR: Test the judge when a rule gets STUCK from re_evaluating and there are missing locks"""
    scope = InternalScope('mock', **self.vo)
    files = create_files(3, scope, self.rse4_id)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.DATASET, self.jdoe)

    # Add a first rule to the DS
    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                       account=self.jdoe,
                       copies=2,
                       rse_expression=self.T1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]

    attach_dids(scope, dataset, files, self.jdoe)

    # Fake judge
    re_evaluator(once=True)

    # Check if the Locks are created properly
    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)

    # Add more files to the DID
    files2 = create_files(3, scope, self.rse4_id)
    attach_dids(scope, dataset, files2, self.jdoe)

    # Mark the rule STUCK to fake that the re-evaluation failed
    session = get_session()
    rule = session.query(models.ReplicationRule).filter_by(id=rule_id).one()
    rule.state = RuleState.STUCK
    session.commit()

    rule_repairer(once=True)

    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
    for file in files2:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
    # 'file' here is the last element of files2 (loop variable leaks out of
    # the for-loop by design); its locks must share both RSEs with files[0].
    assert(len(set([lock.rse_id for lock in get_replica_locks(scope=files[0]['scope'], name=files[0]['name'])]).intersection(set([lock.rse_id for lock in get_replica_locks(scope=file['scope'], name=file['name'])]))) == 2)
    assert(12 == get_rule(rule_id)['locks_replicating_cnt'])
def test_add_rule_dataset_dataset(self):
    """ REPLICATION RULE (CORE): Add a replication rule on a dataset, DATASET Grouping"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account='jdoe',
             copies=2,
             rse_expression=self.T1,
             grouping='DATASET',
             weight=None,
             lifetime=None,
             locked=False,
             subscription_id=None)

    # DATASET grouping: all files share the same pair of T1 RSEs.
    t1 = set([self.rse1_id, self.rse3_id, self.rse5_id])
    first_locks = None
    for file in files:
        if first_locks is None:
            first_locks = set([lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])])
        rse_locks = set([lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])])
        assert(len(t1.intersection(rse_locks)) == 2)
        assert(len(first_locks.intersection(rse_locks)) == 2)

    # Check if the DatasetLocks are created properly
    dataset_locks = [lock for lock in get_dataset_locks(scope=scope, name=dataset)]
    assert(len(t1.intersection(set([lock['rse_id'] for lock in dataset_locks]))) == 2)
    assert(len(first_locks.intersection(set([lock['rse_id'] for lock in dataset_locks]))) == 2)
def test_add_rule_dataset_none_with_weights(self):
    """ REPLICATION RULE (CORE): Add a replication rule on a dataset, NONE Grouping, WEIGHTS"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account='jdoe',
             copies=2,
             rse_expression=self.T1,
             grouping='NONE',
             weight="fakeweight",
             lifetime=None,
             locked=False,
             subscription_id=None)

    # Each file must be locked on two T1 RSEs; the weight forces rse1 in.
    t1 = set([self.rse1_id, self.rse3_id, self.rse5_id])
    for file in files:
        rse_locks = set([lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])])
        assert(len(t1.intersection(rse_locks)) == 2)
        assert_in(self.rse1_id, rse_locks)
def test_judge_add_files_to_dataset_with_2_rules(self):
    """ JUDGE EVALUATOR: Test the judge when adding files to dataset with 2 rules"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')

    # Two single-copy rules on the same RSE from different accounts.
    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account='jdoe',
             copies=1,
             rse_expression=self.rse5,
             grouping='DATASET',
             weight=None,
             lifetime=None,
             locked=False,
             subscription_id=None)
    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account='root',
             copies=1,
             rse_expression=self.rse5,
             grouping='DATASET',
             weight=None,
             lifetime=None,
             locked=False,
             subscription_id=None)

    attach_dids(scope, dataset, files, 'jdoe')
    re_evaluator(once=True)

    # Attach a second batch and re-run the fake judge.
    files = create_files(3, scope, self.rse1)
    attach_dids(scope, dataset, files, 'jdoe')
    re_evaluator(once=True)

    # Check if the Locks are created properly
    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
def test_delete_rule_and_cancel_transfers(self):
    """ REPLICATION RULE (CORE): Test to delete a previously created rule and do not cancel overlapping transfers"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}],
                         account='jdoe',
                         copies=1,
                         rse_expression=self.rse1,
                         grouping='NONE',
                         weight='fakeweight',
                         lifetime=None,
                         locked=False,
                         subscription_id=None)[0]
    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account='jdoe',
             copies=2,
             rse_expression=self.T1,
             grouping='NONE',
             weight='fakeweight',
             lifetime=None,
             locked=False,
             subscription_id=None)[0]
    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account='jdoe',
             copies=3,
             rse_expression=self.T1,
             grouping='NONE',
             weight='fakeweight',
             lifetime=None,
             locked=False,
             subscription_id=None)[0]

    delete_rule(rule_id_1)

    # The 2 + 3 locks from the remaining rules must survive.
    for file in files:
        rse_locks = get_replica_locks(scope=file['scope'], name=file['name'])
        assert(len(rse_locks) == 5)
    # TODO Need to check transfer queue here, this is actually not the check of this test case
    assert_raises(RuleNotFound, delete_rule, uuid())
def test_add_rule_container_none(self):
    """ REPLICATION RULE (CORE): Add a replication rule on a container, NONE Grouping"""
    scope = 'mock'
    container = 'container_' + str(uuid())
    add_did(scope, container, DIDType.from_sym('CONTAINER'), 'jdoe')

    # Three datasets of three files each, attached to the container.
    all_files = []
    for i in xrange(3):
        files = create_files(3, scope, self.rse1)
        all_files.extend(files)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], 'jdoe')

    add_rule(dids=[{'scope': scope, 'name': container}],
             account='jdoe',
             copies=1,
             rse_expression=self.T2,
             grouping='NONE',
             weight=None,
             lifetime=None,
             locked=False,
             subscription_id=None)

    # A T2 rule must lock on rse4 and never on rse5.
    for file in all_files:
        rse_locks = set([lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])])
        assert_in(self.rse4_id, rse_locks)
        assert_not_in(self.rse5_id, rse_locks)
def test_judge_add_dataset_to_container(self):
    """ JUDGE EVALUATOR: Test the judge when adding dataset to container"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    parent_container = 'dataset_' + str(uuid())
    add_did(scope, parent_container, DIDType.from_sym('CONTAINER'), 'jdoe')

    # Rule on the container is added before the dataset is attached.
    add_rule(dids=[{'scope': scope, 'name': parent_container}],
             account='jdoe',
             copies=2,
             rse_expression=self.T1,
             grouping='DATASET',
             weight=None,
             lifetime=None,
             locked=False,
             subscription_id=None)

    attach_dids(scope, parent_container, [{'scope': scope, 'name': dataset}], 'jdoe')

    # Fake judge
    re_evaluator(once=True)

    # Check if the Locks are created properly
    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)

    # Check if the DatasetLocks are created properly
    dataset_locks = [lock for lock in get_dataset_locks(scope=scope, name=dataset)]
    assert(len(dataset_locks) == 2)
def test_judge_add_files_to_dataset(self):
    """ JUDGE EVALUATOR: Test the judge when adding files to dataset"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')

    # Rule is added before any files are attached.
    add_rule(dids=[{'scope': scope, 'name': dataset}],
             account='jdoe',
             copies=2,
             rse_expression=self.T1,
             grouping='DATASET',
             weight=None,
             lifetime=None,
             locked=False,
             subscription_id=None)

    attach_dids(scope, dataset, files, 'jdoe')
    re_evaluator(once=True)

    # Attach a second batch and re-run the fake judge.
    files = create_files(3, scope, self.rse1)
    attach_dids(scope, dataset, files, 'jdoe')
    re_evaluator(once=True)

    # Check if the Locks are created properly
    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
def test_judge_ask_approval(self):
    """ JUDGE INJECTOR: Test the judge when asking approval for a rule"""
    scope = InternalScope('mock')
    files = create_files(3, scope, self.rse1_id)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
    attach_dids(scope, dataset, files, self.jdoe)

    # A rule created with ask_approval waits for an approver.
    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                       account=self.jdoe,
                       copies=1,
                       rse_expression=self.rse4,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None,
                       ask_approval=True)[0]
    assert(get_rule(rule_id)['state'] == RuleState.WAITING_APPROVAL)

    # Approval moves it to INJECT; the injector then creates the locks.
    approve_rule(rule_id=rule_id, approver=self.jdoe)
    assert(get_rule(rule_id)['state'] == RuleState.INJECT)

    rule_injector(once=True)

    # Check if the Locks are created properly
    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 1)
    assert(get_rule(rule_id)['state'] == RuleState.REPLICATING)
def test_to_repair_a_rule_with_only_1_rse_whose_site_is_blocklisted(self):
    """ JUDGE REPAIRER: Test to repair a rule with only 1 rse whose site is blocklisted"""
    rse = rse_name_generator()
    rse_id = add_rse(rse, **self.vo)
    set_local_account_limit(self.jdoe, rse_id, -1)
    rule_repairer(once=True)  # Clean out the repairer

    # RSE-availability lookups are memcached; availability flips below must
    # also invalidate the cache entry or the repairer would see stale data.
    region = make_region().configure('dogpile.cache.memcached',
                                     expiration_time=900,
                                     arguments={'url': config_get('cache', 'url', False, '127.0.0.1:11211'),
                                                'distributed_lock': True})

    def change_availability(new_value):
        update_rse(rse_id, {'availability_write': new_value})
        # clear cache
        region.delete(sha256(rse.encode()).hexdigest())

    for grouping, ignore_availability in itertools.product(["NONE", "DATASET", "ALL"], [True, False]):
        scope = InternalScope('mock', **self.vo)
        files = create_files(1, scope, self.rse4_id, bytes_=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        if ignore_availability:
            # Rule created while the RSE is unavailable: it sticks, but the
            # repairer may recover it because availability is ignored.
            change_availability(False)
            rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                               account=self.jdoe,
                               copies=1,
                               rse_expression=rse,
                               grouping=grouping,
                               weight=None,
                               lifetime=None,
                               locked=False,
                               subscription_id=None,
                               ignore_availability=ignore_availability,
                               activity='DebugJudge')[0]
            assert(RuleState.STUCK == get_rule(rule_id)['state'])
            rule_repairer(once=True)
            assert(RuleState.REPLICATING == get_rule(rule_id)['state'])
            change_availability(True)
        else:
            # Without ignore_availability the rule stays STUCK while the RSE
            # is blocklisted and only recovers once it is writable again.
            rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}],
                               account=self.jdoe,
                               copies=1,
                               rse_expression=rse,
                               grouping=grouping,
                               weight=None,
                               lifetime=None,
                               locked=False,
                               subscription_id=None,
                               ignore_availability=ignore_availability,
                               activity='DebugJudge')[0]
            failed_transfer(scope=scope, name=files[0]['name'], rse_id=get_replica_locks(scope=files[0]['scope'], name=files[0]['name'])[0].rse_id)
            change_availability(False)
            assert(RuleState.STUCK == get_rule(rule_id)['state'])
            rule_repairer(once=True)
            assert(RuleState.STUCK == get_rule(rule_id)['state'])
            change_availability(True)
            rule_repairer(once=True)
            assert(RuleState.REPLICATING == get_rule(rule_id)['state'])