Example 1
    def test_account_counter_judge_evaluate_detach(self):
        """ JUDGE EVALUATOR: Test if the account counter is updated correctly when a file is removed from a DS"""
        re_evaluator(once=True)
        account_update(once=True)

        scope = 'mock'
        files = create_files(3, scope, self.rse1, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')

        # Add a first rule to the DS
        add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None)

        account_update(once=True)

        account_counter_before = get_counter(self.rse1_id, 'jdoe')

        detach_dids(scope, dataset, [files[0]])

        # Fake judge
        re_evaluator(once=True)
        account_update(once=True)

        account_counter_after = get_counter(self.rse1_id, 'jdoe')
        assert(account_counter_before['bytes'] - 100 == account_counter_after['bytes'])
        assert(account_counter_before['files'] - 1 == account_counter_after['files'])
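These tests drive the Rucio daemons synchronously: re_evaluator(once=True) and account_update(once=True) make the judge evaluator and the Abacus account counter process their current backlog and return instead of looping forever. Running both once at the top drains work left over from earlier tests, so the before/after comparison of get_counter only sees the single detach performed here.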
Example 2
    def test_judge_add_files_to_dataset(self):
        """ JUDGE EVALUATOR: Test the judge when adding files to dataset"""
        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)

        # Add a first rule to the DS
        add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                 account=self.jdoe,
                 copies=2,
                 rse_expression=self.T1,
                 grouping='DATASET',
                 weight=None,
                 lifetime=None,
                 locked=False,
                 subscription_id=None)

        attach_dids(scope, dataset, files, self.jdoe)
        re_evaluator(once=True)

        files = create_files(3, scope, self.rse1_id)
        attach_dids(scope, dataset, files, self.jdoe)

        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2
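The create_files helper used throughout these examples is not shown anywhere in them. A minimal sketch of what it plausibly does, assuming it only has to register replicas on the given RSE and return their DID dictionaries (the default account and the 'adler32' placeholder are assumptions, not the upstream implementation):

def create_files(nr_files, scope, rse_id, bytes_=100):
    """Sketch: register nr_files replicas on rse_id and return their DID dicts."""
    account = InternalAccount('root')  # assumption: the real helper picks a default account
    files = [{'scope': scope,
              'name': 'file_' + str(uuid()),
              'bytes': bytes_,
              'adler32': 'deadbeef'} for _ in range(nr_files)]
    add_replicas(rse_id, files, account)  # the same core call used in Example 15
    return files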
Example 3
    def test_judge_evaluate_detach_datasetlock(self):
        """ JUDGE EVALUATOR: Test if the a datasetlock is detached correctly when removing a dataset from a container"""
        re_evaluator(once=True)

        scope = 'mock'
        files = create_files(3, scope, self.rse1, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')

        container = 'container_' + str(uuid())
        add_did(scope, container, DIDType.from_sym('CONTAINER'), 'jdoe')
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], 'jdoe')

        # Add a rule to the Container
        add_rule(dids=[{'scope': scope, 'name': container}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)

        # Check if the datasetlock is there
        locks = list(get_dataset_locks(scope=scope, name=dataset))
        assert(len(locks) > 0)

        detach_dids(scope, container, [{'scope': scope, 'name': dataset}])

        # Fake judge
        re_evaluator(once=True)

        locks = list(get_dataset_locks(scope=scope, name=dataset))
        assert(len(locks) == 0)
Example 4
    def test_judge_add_files_to_dataset_rule_on_container(self):
        """ JUDGE EVALUATOR: Test the judge when attaching file to dataset with rule on two levels of containers"""
        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        parent_container = 'container_' + str(uuid())
        add_did(scope, parent_container, DIDType.from_sym('CONTAINER'), self.jdoe)
        attach_dids(scope, parent_container, [{'scope': scope, 'name': dataset}], self.jdoe)

        parent_parent_container = 'container_' + str(uuid())
        add_did(scope, parent_parent_container, DIDType.from_sym('CONTAINER'), self.jdoe)
        attach_dids(scope, parent_parent_container, [{'scope': scope, 'name': parent_container}], self.jdoe)

        # Add a first rule to the DS
        add_rule(dids=[{'scope': scope, 'name': parent_parent_container}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)

        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)

        # create more files and attach them
        more_files = create_files(3, scope, self.rse1_id)
        attach_dids(scope, dataset, more_files, self.jdoe)
        re_evaluator(once=True)
        # Check if the Locks are created properly
        for file in more_files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
Example 5
    def test_judge_evaluate_detach_datasetlock(self):
        """ JUDGE EVALUATOR: Test if the a datasetlock is detached correctly when removing a dataset from a container"""
        re_evaluator(once=True)

        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        container = 'container_' + str(uuid())
        add_did(scope, container, DIDType.from_sym('CONTAINER'), self.jdoe)
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe)

        # Add a rule to the Container
        add_rule(dids=[{'scope': scope, 'name': container}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)

        # Check if the datasetlock is there
        locks = list(get_dataset_locks(scope=scope, name=dataset))
        assert(len(locks) > 0)

        detach_dids(scope, container, [{'scope': scope, 'name': dataset}])

        # Fake judge
        re_evaluator(once=True)

        locks = list(get_dataset_locks(scope=scope, name=dataset))
        assert(len(locks) == 0)
Example 6
    def test_account_counter_judge_evaluate_detach(self):
        """ JUDGE EVALUATOR: Test if the account counter is updated correctly when a file is removed from a DS"""
        re_evaluator(once=True)
        account_update(once=True)

        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)

        # Add a first rule to the DS
        add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None)

        account_update(once=True)

        account_counter_before = get_usage(self.rse1_id, self.jdoe)

        detach_dids(scope, dataset, [files[0]])

        # Fake judge
        re_evaluator(once=True)
        account_update(once=True)

        account_counter_after = get_usage(self.rse1_id, self.jdoe)
        assert(account_counter_before['bytes'] - 100 == account_counter_after['bytes'])
        assert(account_counter_before['files'] - 1 == account_counter_after['files'])
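Examples 1 and 6 are the same test written against two generations of the core API: plain string scopes and account names on one side, InternalScope objects and account fixtures plus get_usage instead of get_counter on the other. Both calls return the per-account usage on an RSE as a mapping carrying at least the 'bytes' and 'files' keys the asserts rely on; a minimal sanity check under that assumption:

usage = get_usage(self.rse1_id, self.jdoe)  # same call as above
assert {'bytes', 'files'} <= usage.keys()   # assumption: no other keys are needed here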
Example 7
    def test_judge_add_files_to_dataset_with_2_rules(self):
        """ JUDGE EVALUATOR: Test the judge when adding files to dataset with 2 rules"""
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')

        # Add a first rule to the DS
        add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                 account='jdoe',
                 copies=1,
                 rse_expression=self.rse5,
                 grouping='DATASET',
                 weight=None,
                 lifetime=None,
                 locked=False,
                 subscription_id=None)
        add_rule(dids=[{
            'scope': scope,
            'name': dataset
        }],
                 account='root',
                 copies=1,
                 rse_expression=self.rse5,
                 grouping='DATASET',
                 weight=None,
                 lifetime=None,
                 locked=False,
                 subscription_id=None)

        attach_dids(scope, dataset, files, 'jdoe')
        re_evaluator(once=True)

        files = create_files(3, scope, self.rse1)
        attach_dids(scope, dataset, files, 'jdoe')

        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2
Example 8
    def test_judge_add_dataset_to_container(self):
        """ JUDGE EVALUATOR: Test the judge when adding dataset to container"""
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')

        parent_container = 'container_' + str(uuid())
        add_did(scope, parent_container, DIDType.from_sym('CONTAINER'), 'jdoe')
        # Add a first rule to the DS
        add_rule(dids=[{
            'scope': scope,
            'name': parent_container
        }],
                 account='jdoe',
                 copies=2,
                 rse_expression=self.T1,
                 grouping='DATASET',
                 weight=None,
                 lifetime=None,
                 locked=False,
                 subscription_id=None)
        attach_dids(scope, parent_container, [{
            'scope': scope,
            'name': dataset
        }], 'jdoe')
        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2

        # Check if the DatasetLocks are created properly
        dataset_locks = list(get_dataset_locks(scope=scope, name=dataset))
        assert len(dataset_locks) == 2
Example 9
    def test_judge_add_files_to_dataset(self):
        """ JUDGE EVALUATOR: Test the judge when adding files to dataset"""
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')

        # Add a first rule to the DS
        add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)

        attach_dids(scope, dataset, files, 'jdoe')
        re_evaluator(once=True)

        files = create_files(3, scope, self.rse1)
        attach_dids(scope, dataset, files, 'jdoe')

        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
Example 10
    def test_dataset_callback_with_evaluator(self):
        """ REPLICATION RULE (CORE): Test dataset callback with judge evaluator"""

        scope = 'mock'
        files = create_files(3, scope, self.rse1, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')

        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse3, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, notify='C')[0]

        assert(False == check_dataset_ok_callback(scope, dataset, self.rse3, rule_id))

        attach_dids(scope, dataset, files, 'jdoe')
        set_status(scope=scope, name=dataset, open=False)
        assert(False == check_dataset_ok_callback(scope, dataset, self.rse3, rule_id))

        re_evaluator(once=True)

        successful_transfer(scope=scope, name=files[0]['name'], rse_id=self.rse3_id, nowait=False)
        successful_transfer(scope=scope, name=files[1]['name'], rse_id=self.rse3_id, nowait=False)
        successful_transfer(scope=scope, name=files[2]['name'], rse_id=self.rse3_id, nowait=False)

        assert(True == check_dataset_ok_callback(scope, dataset, self.rse3, rule_id))
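check_dataset_ok_callback is not defined in any of these examples. Since the rule is created with notify='C', the helper presumably checks whether the DATASETOK notification for this rule has been queued; a rough, hypothetical sketch that looks for a matching row in the messages table:

import json

def check_dataset_ok_callback(scope, name, rse, rule_id):
    # Hypothetical re-implementation; the real helper is not shown here.
    session = get_session()
    payload = json.dumps({'scope': scope, 'name': name, 'rse': rse, 'rule_id': rule_id})
    return session.query(models.Message.id).filter(models.Message.payload == payload).count() > 0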
Example 11
    def test_repair_a_rule_with_missing_locks(self):
        """ JUDGE EVALUATOR: Test the judge when a rule gets STUCK from re_evaluating and there are missing locks"""
        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse4_id)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.DATASET, self.jdoe)

        # Add a first rule to the DS
        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0]

        attach_dids(scope, dataset, files, self.jdoe)

        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)

        # Add more files to the DID
        files2 = create_files(3, scope, self.rse4_id)
        attach_dids(scope, dataset, files2, self.jdoe)

        # Mark the rule STUCK to fake that the re-evaluation failed
        session = get_session()
        rule = session.query(models.ReplicationRule).filter_by(id=rule_id).one()
        rule.state = RuleState.STUCK
        session.commit()

        rule_repairer(once=True)

        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
        for file in files2:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
            rse_ids_first = {lock.rse_id for lock in get_replica_locks(scope=files[0]['scope'], name=files[0]['name'])}
            rse_ids_new = {lock.rse_id for lock in get_replica_locks(scope=file['scope'], name=file['name'])}
            assert len(rse_ids_first & rse_ids_new) == 2
        assert(12 == get_rule(rule_id)['locks_replicating_cnt'])
Example 12
    def test_repair_a_rule_with_missing_locks(self):
        """ JUDGE EVALUATOR: Test the judge when a rule gets STUCK from re_evaluating and there are missing locks"""
        scope = 'mock'
        files = create_files(3, scope, self.rse4)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')

        # Add a first rule to the DS
        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0]

        attach_dids(scope, dataset, files, 'jdoe')

        # Fake judge
        re_evaluator(once=True)

        # Check if the Locks are created properly
        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)

        # Add more files to the DID
        files2 = create_files(3, scope, self.rse4)
        attach_dids(scope, dataset, files2, 'jdoe')

        # Mark the rule STUCK to fake that the re-evaluation failed
        session = get_session()
        rule = session.query(models.ReplicationRule).filter_by(id=rule_id).one()
        rule.state = RuleState.STUCK
        session.commit()

        rule_repairer(once=True)

        for file in files:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
        for file in files2:
            assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)
            rse_ids_first = {lock.rse_id for lock in get_replica_locks(scope=files[0]['scope'], name=files[0]['name'])}
            rse_ids_new = {lock.rse_id for lock in get_replica_locks(scope=file['scope'], name=file['name'])}
            assert len(rse_ids_first & rse_ids_new) == 2
        assert(12 == get_rule(rule_id)['locks_replicating_cnt'])
Example 13
    def test_judge_evaluate_detach(self):
        """ JUDGE EVALUATOR: Test if the detach is done correctly"""
        re_evaluator(once=True)

        scope = InternalScope('mock', **self.vo)
        container = 'container_' + str(uuid())
        add_did(scope, container, DIDType.from_sym('CONTAINER'), self.jdoe)

        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe)

        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe)

        scope = InternalScope('mock', **self.vo)
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe)
        attach_dids(scope, dataset, files, self.jdoe)
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe)

        # Add a first rule to the Container
        rule_id = add_rule(dids=[{'scope': scope, 'name': container}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None)[0]

        # Fake judge
        re_evaluator(once=True)

        assert(9 == get_rule(rule_id)['locks_ok_cnt'])

        detach_dids(scope, dataset, [files[0]])

        # Fake judge
        re_evaluator(once=True)

        assert(8 == get_rule(rule_id)['locks_ok_cnt'])
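With grouping='ALL' the single container rule locks every file it covers, so the three datasets of three files each yield locks_ok_cnt == 9; detaching one file and re-running the evaluator brings the count down to 8.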
Example 14
def test_tpc(containerized_rses, root_account, test_scope, did_factory,
             rse_client, rule_client, artifact):
    if len(containerized_rses) < 2:
        pytest.skip("TPC tests need at least 2 containerized RSEs for execution")
    rse1_name, rse1_id = containerized_rses[0]
    rse2_name, rse2_id = containerized_rses[1]

    base_file_name = generate_uuid()
    test_file = did_factory.upload_test_file(rse1_name,
                                             name=base_file_name + '.000',
                                             return_full_item=True)
    test_file_did_str = '%s:%s' % (test_file['did_scope'],
                                   test_file['did_name'])
    test_file_did = {'scope': test_scope, 'name': test_file['did_name']}
    test_file_name_hash = hashlib.md5(
        test_file_did_str.encode('utf-8')).hexdigest()
    test_file_expected_pfn = '%s/%s/%s/%s' % (
        test_file_did['scope'], test_file_name_hash[0:2],
        test_file_name_hash[2:4], test_file_did['name'])

    rse1_hostname = rse_client.get_protocols(rse1_name)[0]['hostname']
    rse2_hostname = rse_client.get_protocols(rse2_name)[0]['hostname']

    rule_id = add_rule(dids=[test_file_did],
                       account=root_account,
                       copies=1,
                       rse_expression=rse2_name,
                       grouping='NONE',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)
    rule = rule_client.get_replication_rule(rule_id[0])

    re_evaluator(once=True)

    assert rule['locks_ok_cnt'] == 0
    assert rule['locks_replicating_cnt'] == 1

    [[_, [transfer_path]]] = next_transfers_to_submit(rses=[rse1_id, rse2_id]).items()
    assert transfer_path[0].rws.rule_id == rule_id[0]
    src_url = transfer_path[0].legacy_sources[0][1]
    dest_url = transfer_path[0].dest_url
    check_url(src_url, rse1_hostname, test_file_expected_pfn)
    check_url(dest_url, rse2_hostname, test_file_expected_pfn)

    # Run Submitter
    submitter.submitter(once=True)

    # Get FTS transfer job id
    request = get_request_by_did(rse_id=rse2_id, **test_file_did)
    fts_transfer_id = request['external_id']

    # Check FTS transfer job
    assert fts_transfer_id is not None

    # Wait for the FTS transfer to finish
    fts_transfer_status = None
    for _ in range(MAX_POLL_WAIT_SECONDS):
        fts_transfer_status = poll_fts_transfer_status(fts_transfer_id)
        if fts_transfer_status not in ['SUBMITTED', 'ACTIVE']:
            break
        time.sleep(1)
    assert fts_transfer_status == 'FINISHED'

    poller.run(once=True, older_than=0)
    finisher.run(once=True)
    rule = rule_client.get_replication_rule(rule_id[0])
    assert rule['locks_ok_cnt'] == 1
    assert rule['locks_replicating_cnt'] == 0

    if artifact is not None:
        date = datetime.date.today().strftime("%Y-%m-%d")
        with open(artifact, 'w') as artifact_file:
            artifact_file.write(
                f"/var/log/fts3/{date}/{rse1_name.lower()}__{rse2_name.lower()}/*__{fts_transfer_id}"
            )
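check_url is only ever called here and in Example 16. Given its arguments, it plausibly just verifies that a transfer URL points at the expected host and ends with the deterministic hash-based PFN computed above; a sketch under that assumption:

from urllib.parse import urlparse

def check_url(url, hostname, path_suffix):
    # Hypothetical sketch; the real helper is not shown in these examples.
    parsed = urlparse(url)
    assert parsed.hostname == hostname
    assert parsed.path.endswith(path_suffix)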
Example 15
def test_bb8_full_workflow(vo, root_account, jdoe_account, rse_factory,
                           mock_scope, did_factory):
    """BB8: Test the rebalance rule method"""
    config_core.set(section='bb8', option='allowed_accounts', value='jdoe')
    tot_rses = 4
    rses = [rse_factory.make_posix_rse() for _ in range(tot_rses)]
    rse1, rse1_id = rses[0]
    rse2, rse2_id = rses[1]
    rse3, rse3_id = rses[2]
    rse4, rse4_id = rses[3]

    # Add Tags
    # RSE 1 and 2 match the expression T1=true
    # RSE 3 and 4 match the expression T2=true
    T1 = tag_generator()
    T2 = tag_generator()
    add_rse_attribute(rse1_id, T1, True)
    add_rse_attribute(rse2_id, T1, True)
    add_rse_attribute(rse3_id, T2, True)
    add_rse_attribute(rse4_id, T2, True)

    # Add fake weights
    add_rse_attribute(rse1_id, "fakeweight", 10)
    add_rse_attribute(rse2_id, "fakeweight", 0)
    add_rse_attribute(rse3_id, "fakeweight", 0)
    add_rse_attribute(rse4_id, "fakeweight", 0)
    add_rse_attribute(rse1_id, "freespace", 1)
    add_rse_attribute(rse2_id, "freespace", 1)
    add_rse_attribute(rse3_id, "freespace", 1)
    add_rse_attribute(rse4_id, "freespace", 1)

    # Add quota
    set_local_account_limit(jdoe_account, rse1_id, -1)
    set_local_account_limit(jdoe_account, rse2_id, -1)
    set_local_account_limit(jdoe_account, rse3_id, -1)
    set_local_account_limit(jdoe_account, rse4_id, -1)

    set_local_account_limit(root_account, rse1_id, -1)
    set_local_account_limit(root_account, rse2_id, -1)
    set_local_account_limit(root_account, rse3_id, -1)
    set_local_account_limit(root_account, rse4_id, -1)

    # Invalidate the cache because the result of parse_expression is cached
    REGION.invalidate()

    tot_datasets = 4
    # Create a list of datasets
    datasets = [did_factory.make_dataset() for _ in range(tot_datasets)]
    dsn = [dataset['name'] for dataset in datasets]

    rules = list()

    base_unit = 100000000000
    nb_files1 = 7
    nb_files2 = 5
    nb_files3 = 3
    nb_files4 = 2
    file_size = 1 * base_unit
    rule_to_rebalance = None

    # Add one secondary file
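    # A negative lifetime (-86400 s) means the rule expired a day ago, so the
    # rule_cleaner(once=True) call below drops it immediately and the replica
    # is left behind as unprotected, 'secondary' data.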
    files = create_files(1, mock_scope, rse1_id, bytes_=1)
    add_rule(dids=[{
        'scope': mock_scope,
        'name': files[0]['name']
    }],
             account=jdoe_account,
             copies=1,
             rse_expression=rse1,
             grouping='DATASET',
             weight=None,
             lifetime=-86400,
             locked=False,
             subscription_id=None)[0]
    for cnt in range(3, tot_rses):
        add_replicas(rses[cnt][1], files, jdoe_account)
        add_rule(dids=[{
            'scope': mock_scope,
            'name': files[0]['name']
        }],
                 account=jdoe_account,
                 copies=1,
                 rse_expression=rses[cnt][0],
                 grouping='DATASET',
                 weight=None,
                 lifetime=-86400,
                 locked=False,
                 subscription_id=None)[0]
    rule_cleaner(once=True)

    # Create dataset 1 of 800 GB and create a rule on RSE 1 and RSE 3
    files = create_files(nb_files1, mock_scope, rse1_id, bytes_=file_size)
    attach_dids(mock_scope, dsn[0], files, jdoe_account)

    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[0]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rules.append(rule_id)

    add_replicas(rse3_id, files, jdoe_account)
    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[0]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse3,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rules.append(rule_id)

    # Create dataset 2 of 500 GB and create a rule on RSE 1 and RSE 2
    files = create_files(nb_files2, mock_scope, rse1_id, bytes_=file_size)
    attach_dids(mock_scope, dsn[1], files, jdoe_account)

    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[1]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rules.append(rule_id)

    add_replicas(rse2_id, files, jdoe_account)
    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[1]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse2,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rules.append(rule_id)

    # Create dataset 3 of 300 GB and create a rule on RSE 1. The copy on RSE 3 is secondary
    files = create_files(nb_files3, mock_scope, rse1_id, bytes_=file_size)
    attach_dids(mock_scope, dsn[2], files, jdoe_account)

    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[2]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse1,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rule_to_rebalance = rule_id
    rules.append(rule_id)

    add_replicas(rse3_id, files, jdoe_account)
    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[2]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse3,
                       grouping='DATASET',
                       weight=None,
                       lifetime=-86400,
                       locked=False,
                       subscription_id=None)[0]
    rule_cleaner(once=True)
    # The rule above was added with a negative lifetime, so rule_cleaner has
    # already removed it and looking it up must fail
    with pytest.raises(RuleNotFound):
        get_rule(rule_id)

    # Create dataset 4 of 200 GB and create a rule on RSE 3. The copy on RSE 2 is secondary
    files = create_files(nb_files4, mock_scope, rse3_id, bytes_=file_size)
    attach_dids(mock_scope, dsn[3], files, jdoe_account)

    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[3]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse3,
                       grouping='DATASET',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)[0]
    rules.append(rule_id)

    add_replicas(rse2_id, files, jdoe_account)
    rule_id = add_rule(dids=[{
        'scope': mock_scope,
        'name': dsn[3]
    }],
                       account=jdoe_account,
                       copies=1,
                       rse_expression=rse2,
                       grouping='DATASET',
                       weight=None,
                       lifetime=-86400,
                       locked=False,
                       subscription_id=None)[0]
    rule_cleaner(once=True)
    with pytest.raises(RuleNotFound):
        get_rule(rule_id)

    for dataset in dsn:
        set_status(mock_scope, dataset, open=False)

    for rse in rses:
        fill_rse_expired(rse[1])
        set_rse_usage(rse_id=rse[1],
                      source='min_free_space',
                      used=2 * base_unit,
                      free=2 * base_unit,
                      session=None)
        set_rse_usage(rse_id=rse[1],
                      source='storage',
                      used=15 * base_unit,
                      free=2 * base_unit,
                      session=None)
    set_rse_usage(rse_id=rse2_id,
                  source='min_free_space',
                  used=1 * base_unit,
                  free=1 * base_unit,
                  session=None)
    set_rse_usage(rse_id=rse2_id,
                  source='storage',
                  used=6 * base_unit,
                  free=5 * base_unit,
                  session=None)

    run_abacus(once=True, threads=1, fill_history_table=False, sleep_time=10)
    # Summary :
    # RSE 1 : 1500 GB primary + 1 B secondary
    tot_space = [
        src for src in get_rse_usage(rse1_id) if src['source'] == 'rucio'
    ][0]
    expired = [
        src for src in get_rse_usage(rse1_id) if src['source'] == 'expired'
    ][0]
    assert tot_space['used'] == (nb_files1 + nb_files2 +
                                 nb_files3) * file_size + 1
    assert expired['used'] == 1
    # RSE 2 : 500 GB primary + 100 GB secondary
    tot_space = [
        src for src in get_rse_usage(rse2_id) if src['source'] == 'rucio'
    ][0]
    expired = [
        src for src in get_rse_usage(rse2_id) if src['source'] == 'expired'
    ][0]
    assert tot_space['used'] == (nb_files2 + nb_files4) * file_size
    assert expired['used'] == nb_files4 * file_size
    # Total primary on T1=true : 2000 GB
    # Total secondary on T1=true : 200 GB
    # Ratio secondary / primary = 10  %
    # Ratio on RSE 1 : 0 %
    # Ratio on RSE 2 : 40 %

    # Now run BB8

    re_evaluator(once=True, sleep_time=30, did_limit=100)
    bb8_run(once=True,
            rse_expression='%s=true' % str(T1),
            move_subscriptions=False,
            use_dump=False,
            sleep_time=300,
            threads=1,
            dry_run=False)

    for rule_id in rules:
        rule = get_rule(rule_id)
        if rule_id != rule_to_rebalance:
            assert (rule['child_rule_id'] is None)
        else:
            assert (rule['child_rule_id'] is not None)
            assert (
                rule['expires_at'] <= datetime.utcnow() + timedelta(seconds=1)
            )  # timedelta needed to prevent failure due to rounding effects
            child_rule_id = rule['child_rule_id']
            child_rule = get_rule(child_rule_id)
            assert (child_rule['rse_expression'] == rse2)
            # For teardown, delete child rule
            update_rule(child_rule_id, {'lifetime': -86400})
    rule_cleaner(once=True)

    for dataset in dsn:
        set_metadata(mock_scope, dataset, 'lifetime', -86400)
    undertaker.run(once=True)
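tag_generator, used above to create the T1/T2 RSE attributes, is another helper that is not shown; anything that produces unique, expression-safe attribute names will do. A minimal sketch:

import random
import string

def tag_generator(size=8, chars=string.ascii_uppercase):
    # Sketch only: the upstream helper may differ.
    return ''.join(random.choice(chars) for _ in range(size))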
Example 16
def test_tpc(containerized_rses, root_account, test_scope, did_factory,
             rse_client, rule_client, artifact):
    if len(containerized_rses) < 2:
        pytest.skip("TPC tests need at least 2 containerized RSEs for execution")
    rse1_name, rse1_id = containerized_rses[0]
    rse2_name, rse2_id = containerized_rses[1]

    base_file_name = generate_uuid()
    test_file = did_factory.upload_test_file(rse1_name,
                                             name=base_file_name + '.000',
                                             return_full_item=True)
    test_file_did_str = '%s:%s' % (test_file['did_scope'],
                                   test_file['did_name'])
    test_file_did = {'scope': test_scope, 'name': test_file['did_name']}
    test_file_name_hash = hashlib.md5(
        test_file_did_str.encode('utf-8')).hexdigest()
    test_file_expected_pfn = '%s/%s/%s/%s' % (
        test_file_did['scope'], test_file_name_hash[0:2],
        test_file_name_hash[2:4], test_file_did['name'])

    rse1_hostname = rse_client.get_protocols(rse1_name)[0]['hostname']
    rse2_hostname = rse_client.get_protocols(rse2_name)[0]['hostname']

    rule_id = add_rule(dids=[test_file_did],
                       account=root_account,
                       copies=1,
                       rse_expression=rse2_name,
                       grouping='NONE',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)
    rule = rule_client.get_replication_rule(rule_id[0])

    re_evaluator(once=True)

    assert rule['locks_ok_cnt'] == 0
    assert rule['locks_replicating_cnt'] == 1

    transfer_requestss = get_transfer_requests_and_source_replicas(
        rses=[rse1_id, rse2_id])
    for transfer_requests in transfer_requestss:
        for transfer_request in transfer_requests:
            if transfer_requests[transfer_request]['rule_id'] == rule_id[0]:
                src_url = transfer_requests[transfer_request]['sources'][0][1]
                dest_url = transfer_requests[transfer_request]['dest_urls'][0]
                check_url(src_url, rse1_hostname, test_file_expected_pfn)
                check_url(dest_url, rse2_hostname, test_file_expected_pfn)

    # Run Submitter
    submitter.run(once=True)

    # Get FTS transfer job info
    fts_transfer_id, fts_transfer_status = list_fts_transfer()

    # Check FTS transfer job
    assert fts_transfer_id is not None
    assert fts_transfer_status in ['SUBMITTED', 'ACTIVE']

    fts_transfer_status = poll_fts_transfer_status(fts_transfer_id)
    assert fts_transfer_status == 'FINISHED'

    poller.run(once=True, older_than=0)
    finisher.run(once=True)
    rule = rule_client.get_replication_rule(rule_id[0])
    assert rule['locks_ok_cnt'] == 1
    assert rule['locks_replicating_cnt'] == 0

    if artifact is not None:
        date = datetime.date.today().strftime("%Y-%m-%d")
        with open(artifact, 'w') as artifact_file:
            artifact_file.write(
                f"/var/log/fts3/{date}/{rse1_name.lower()}__{rse2_name.lower()}/*__{fts_transfer_id}"
            )