Example No. 1
    def test_conveyor_submitter(self):
        """ CONVEYOR (DAEMON): Test the conveyor submitter daemon."""
        src = 'ATLASSCRATCHDISK://ccsrm.in2p3.fr:8443/srm/managerv2?SFN=/pnfs/in2p3.fr/data/atlas/atlasscratchdisk/rucio/'
        dest = 'ATLASSCRATCHDISK://dcache-se-atlas.desy.de:8443/srm/managerv2?SFN=/pnfs/desy.de/atlas/dq2/atlasscratchdisk/rucio/'
        request_transfer(loop=10, src=src, dst=dest, upload=False, same_src=True, same_dst=True)

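        # Presumed flow: the throttler releases the queued requests, the
        # submitter hands them to the transfer tool, and after a short wait
        # the poller and finisher collect and finalise the results.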
        throttler.run(once=True)
        submitter.run(once=True)
        submitter.run(once=True)
        time.sleep(5)
        poller.run(once=True)
        finisher.run(once=True)
Example No. 2
def test_tpc(containerized_rses, root_account, test_scope, did_factory,
             rse_client, rule_client, artifact):
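    # Fixtures (containerized_rses, root_account, test_scope, did_factory,
    # rse_client, rule_client, artifact) and helpers such as check_url,
    # poll_fts_transfer_status and MAX_POLL_WAIT_SECONDS are assumed to be
    # provided by the surrounding test module / conftest, not this snippet.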
    if len(containerized_rses) < 2:
        pytest.skip(
            "TPC tests need at least 2 containerized RSEs for execution")
    rse1_name, rse1_id = containerized_rses[0]
    rse2_name, rse2_id = containerized_rses[1]

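    # Upload a single test file to the source RSE and derive the
    # deterministic, md5-based path it should get on both endpoints.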
    base_file_name = generate_uuid()
    test_file = did_factory.upload_test_file(rse1_name,
                                             name=base_file_name + '.000',
                                             return_full_item=True)
    test_file_did_str = '%s:%s' % (test_file['did_scope'],
                                   test_file['did_name'])
    test_file_did = {'scope': test_scope, 'name': test_file['did_name']}
    test_file_name_hash = hashlib.md5(
        test_file_did_str.encode('utf-8')).hexdigest()
    test_file_expected_pfn = '%s/%s/%s/%s' % (
        test_file_did['scope'], test_file_name_hash[0:2],
        test_file_name_hash[2:4], test_file_did['name'])

    rse1_hostname = rse_client.get_protocols(rse1_name)[0]['hostname']
    rse2_hostname = rse_client.get_protocols(rse2_name)[0]['hostname']

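    # Request one copy on the destination RSE, then run the judge
    # re-evaluator once so the rule's locks and transfer request are in place.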
    rule_id = add_rule(dids=[test_file_did],
                       account=root_account,
                       copies=1,
                       rse_expression=rse2_name,
                       grouping='NONE',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)
    rule = rule_client.get_replication_rule(rule_id[0])

    re_evaluator(once=True)

    assert rule['locks_ok_cnt'] == 0
    assert rule['locks_replicating_cnt'] == 1

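    # Exactly one transfer path (rse1 -> rse2) should now be queued for this
    # rule; verify that source and destination URLs point at the expected
    # hosts and deterministic PFN.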
    [[_, [transfer_path]]
     ] = next_transfers_to_submit(rses=[rse1_id, rse2_id]).items()
    assert transfer_path[0].rws.rule_id == rule_id[0]
    src_url = transfer_path[0].legacy_sources[0][1]
    dest_url = transfer_path[0].dest_url
    check_url(src_url, rse1_hostname, test_file_expected_pfn)
    check_url(dest_url, rse2_hostname, test_file_expected_pfn)

    # Run Submitter
    submitter.submitter(once=True)

    # Get FTS transfer job id
    request = get_request_by_did(rse_id=rse2_id, **test_file_did)
    fts_transfer_id = request['external_id']

    # Check FTS transfer job
    assert fts_transfer_id is not None

    # Wait for the FTS transfer to finish
    fts_transfer_status = None
    for _ in range(MAX_POLL_WAIT_SECONDS):
        fts_transfer_status = poll_fts_transfer_status(fts_transfer_id)
        if fts_transfer_status not in ['SUBMITTED', 'ACTIVE']:
            break
        time.sleep(1)
    assert fts_transfer_status == 'FINISHED'

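    # The poller picks up the finished FTS job and the finisher applies the
    # result, so the rule's lock should flip from REPLICATING to OK.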
    poller.run(once=True, older_than=0)
    finisher.run(once=True)
    rule = rule_client.get_replication_rule(rule_id[0])
    assert rule['locks_ok_cnt'] == 1
    assert rule['locks_replicating_cnt'] == 0

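    # Optionally record where the FTS server logs for this job are expected
    # to live (path layout as used by the containerized FTS setup), so they
    # can be collected as a CI artifact.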
    if artifact is not None:
        date = datetime.date.today().strftime("%Y-%m-%d")
        with open(artifact, 'w') as artifact_file:
            artifact_file.write(
                f"/var/log/fts3/{date}/{rse1_name.lower()}__{rse2_name.lower()}/*__{fts_transfer_id}"
            )
Example No. 3
def test_tpc(containerized_rses, root_account, test_scope, did_factory,
             rse_client, rule_client, artifact):
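    # Same scenario as the previous example, but written against what appears
    # to be an older conveyor API (get_transfer_requests_and_source_replicas,
    # list_fts_transfer); fixtures and helpers again come from the
    # surrounding test module.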
    if len(containerized_rses) < 2:
        pytest.skip(
            "TPC tests need at least 2 containerized RSEs for execution")
    rse1_name, rse1_id = containerized_rses[0]
    rse2_name, rse2_id = containerized_rses[1]

    base_file_name = generate_uuid()
    test_file = did_factory.upload_test_file(rse1_name,
                                             name=base_file_name + '.000',
                                             return_full_item=True)
    test_file_did_str = '%s:%s' % (test_file['did_scope'],
                                   test_file['did_name'])
    test_file_did = {'scope': test_scope, 'name': test_file['did_name']}
    test_file_name_hash = hashlib.md5(
        test_file_did_str.encode('utf-8')).hexdigest()
    test_file_expected_pfn = '%s/%s/%s/%s' % (
        test_file_did['scope'], test_file_name_hash[0:2],
        test_file_name_hash[2:4], test_file_did['name'])

    rse1_hostname = rse_client.get_protocols(rse1_name)[0]['hostname']
    rse2_hostname = rse_client.get_protocols(rse2_name)[0]['hostname']

    rule_id = add_rule(dids=[test_file_did],
                       account=root_account,
                       copies=1,
                       rse_expression=rse2_name,
                       grouping='NONE',
                       weight=None,
                       lifetime=None,
                       locked=False,
                       subscription_id=None)
    rule = rule_client.get_replication_rule(rule_id[0])

    re_evaluator(once=True)

    assert rule['locks_ok_cnt'] == 0
    assert rule['locks_replicating_cnt'] == 1

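    # get_transfer_requests_and_source_replicas presumably returns a sequence
    # of dicts keyed by request id; locate the request created by our rule
    # and check its source and destination URLs.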
    transfer_requestss = get_transfer_requests_and_source_replicas(
        rses=[rse1_id, rse2_id])
    for transfer_requests in transfer_requestss:
        for transfer_request in transfer_requests:
            if transfer_requests[transfer_request]['rule_id'] == rule_id[0]:
                src_url = transfer_requests[transfer_request]['sources'][0][1]
                dest_url = transfer_requests[transfer_request]['dest_urls'][0]
                check_url(src_url, rse1_hostname, test_file_expected_pfn)
                check_url(dest_url, rse2_hostname, test_file_expected_pfn)

    # Run Submitter
    submitter.run(once=True)

    # Get FTS transfer job info
    fts_transfer_id, fts_transfer_status = list_fts_transfer()

    # Check FTS transfer job
    assert fts_transfer_id is not None
    assert fts_transfer_status in ['SUBMITTED', 'ACTIVE']

    fts_transfer_status = poll_fts_transfer_status(fts_transfer_id)
    assert fts_transfer_status == 'FINISHED'

    poller.run(once=True, older_than=0)
    finisher.run(once=True)
    rule = rule_client.get_replication_rule(rule_id[0])
    assert rule['locks_ok_cnt'] == 1
    assert rule['locks_replicating_cnt'] == 0

    if artifact is not None:
        date = datetime.date.today().strftime("%Y-%m-%d")
        with open(artifact, 'w') as artifact_file:
            artifact_file.write(
                f"/var/log/fts3/{date}/{rse1_name.lower()}__{rse2_name.lower()}/*__{fts_transfer_id}"
            )