Example #1
    def __init__(self):
        self.rse1 = rse_name_generator()
        self.rse2 = rse_name_generator()
        self.rse3 = rse_name_generator()
        self.rse4 = rse_name_generator()
        self.rse5 = rse_name_generator()

        self.rse1_id = rse.add_rse(self.rse1)
        self.rse2_id = rse.add_rse(self.rse2)
        self.rse3_id = rse.add_rse(self.rse3)
        self.rse4_id = rse.add_rse(self.rse4)
        self.rse5_id = rse.add_rse(self.rse5)

        # Add Attributes
        self.attribute = attribute_name_generator()

        rse.add_rse_attribute(self.rse1_id, self.attribute, "at")
        rse.add_rse_attribute(self.rse2_id, self.attribute, "de")
        rse.add_rse_attribute(self.rse3_id, self.attribute, "fr")
        rse.add_rse_attribute(self.rse4_id, self.attribute, "uk")
        rse.add_rse_attribute(self.rse5_id, self.attribute, "us")

        # Add Tags
        self.tag1 = tag_generator()
        self.tag2 = tag_generator()
        rse.add_rse_attribute(self.rse1_id, self.tag1, True)
        rse.add_rse_attribute(self.rse2_id, self.tag1, True)
        rse.add_rse_attribute(self.rse3_id, self.tag1, True)
        rse.add_rse_attribute(self.rse4_id, self.tag2, True)
        rse.add_rse_attribute(self.rse5_id, self.tag2, True)

        self.rse_client = RSEClient()
Example #2
def get_rse_client(rse, vo='def', **kwarg):
    '''
    get_rse_client
    '''
    from rucio.client.rseclient import RSEClient
    client = RSEClient(vo=vo)
    return client.get_rse(rse)
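
A minimal usage sketch for the helper above, assuming a reachable Rucio server and a hypothetical RSE named 'MOCK' registered on the default VO:

# Hypothetical call: fetch the settings dict of the RSE 'MOCK' on VO 'def'
settings = get_rse_client('MOCK', vo='def')
print(settings.get('rse'))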
Example #3
 def setup(self):
     self.account_client = AccountClient()
     self.scope_client = ScopeClient()
     self.meta_client = MetaClient()
     self.did_client = DIDClient()
     self.replica_client = ReplicaClient()
     self.rse_client = RSEClient()
Example #4
    def setUp(self):
        self.account_client = AccountClient()
        self.rse_client = RSEClient()

        self.account = generate_uuid()[:10]
        self.rse = rse_name_generator()

        self.account_client.add_account(self.account, 'SERVICE',
                                        '*****@*****.**')
        self.rse_client.add_rse(self.rse)
Example #5
    def __lfns2pfns_client(self, lfns):
        """ Provides the path of a replica for non-deterministic sites. Will be assigned to get path by the __init__ method if neccessary.

            :param scope: list of DIDs

            :returns: dict with scope:name as keys and PFN as value (in case of errors the Rucio exception si assigned to the key)
        """
        client = RSEClient()

        lfns = [lfns] if isinstance(lfns, dict) else lfns
        lfn_query = ["%s:%s" % (lfn['scope'], lfn['name']) for lfn in lfns]
        return client.lfns2pfns(self.rse['rse'], lfn_query, scheme=self.attributes['scheme'])
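
For reference, a hedged sketch of the input and output shapes handled by the resolver above (scope, file names, RSE and scheme are illustrative assumptions):

# lfns is a list of DID dicts:
#   [{'scope': 'user.jdoe', 'name': 'file_1'}, {'scope': 'user.jdoe', 'name': 'file_2'}]
# The lfns2pfns call then returns a mapping from 'scope:name' to the resolved PFN, e.g.:
#   {'user.jdoe:file_1': 'srm://host/path/file_1', 'user.jdoe:file_2': 'srm://host/path/file_2'}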
Example #6
    def setUp(self):
        if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
            self.vo = {'vo': config_get('client', 'vo', raise_exception=False, default='tst')}
        else:
            self.vo = {}

        self.account_client = AccountClient()
        self.scope_client = ScopeClient()
        self.meta_client = MetaClient()
        self.did_client = DIDClient()
        self.replica_client = ReplicaClient()
        self.rse_client = RSEClient()
Example #7
    def test_rses_at_different_vos(self):
        """ MULTI VO (CLIENT): Test that RSEs from 2nd vo don't interfere """
        # Set up RSEs at two VOs
        rse_client = RSEClient()
        rse_str = ''.join(choice(ascii_uppercase) for x in range(10))
        tst = 'TST_%s' % rse_str
        new = 'NEW_%s' % rse_str
        shr = 'SHR_%s' % rse_str
        rse_client.add_rse(tst)
        rse_client.add_rse(shr)
        add_rse(new, 'root', **self.new_vo)
        shr_id_new_original = add_rse(shr, 'root', **self.new_vo)  # Accurate rse_id for shared RSE at 'new'

        # Check the cached rse-id from each VO does not interfere
        shr_id_tst = get_rse_id(shr, **self.vo)
        shr_id_new = get_rse_id(shr, **self.new_vo)
        assert_equal(shr_id_new, shr_id_new_original)
        assert_not_equal(shr_id_new, shr_id_tst)

        # Check that when listing RSEs we only get RSEs for our VO
        rse_list_tst = [r['rse'] for r in rse_client.list_rses()]
        rse_list_new = [r['rse'] for r in list_rses(filters={}, **self.new_vo)]
        assert_true(tst in rse_list_tst)
        assert_false(new in rse_list_tst)
        assert_true(shr in rse_list_tst)
        assert_false(tst in rse_list_new)
        assert_true(new in rse_list_new)
        assert_true(shr in rse_list_new)

        # Check the cached attribute-value results do not interfere and only give results from the appropriate VO
        attribute_value = generate_uuid()
        add_rse_attribute(new, 'test', attribute_value, 'root', **self.new_vo)
        rses_tst_1 = list(get_rses_with_attribute_value('test', attribute_value, 'test', **self.vo))
        rses_new_1 = list(get_rses_with_attribute_value('test', attribute_value, 'test', **self.new_vo))
        rses_tst_2 = list(get_rses_with_attribute_value('test', attribute_value, 'test', **self.vo))
        rses_new_2 = list(get_rses_with_attribute_value('test', attribute_value, 'test', **self.new_vo))
        assert_equal(len(rses_tst_1), 0)
        assert_not_equal(len(rses_new_1), 0)
        assert_equal(len(rses_tst_2), 0)
        assert_not_equal(len(rses_new_2), 0)

        # check parse_expression
        rses_tst_3 = parse_expression(shr, filter={'vo': self.vo['vo']})
        rses_tst_4 = parse_expression(tst, filter={'vo': self.vo['vo']})
        rses_new_3 = parse_expression(shr, filter={'vo': self.new_vo['vo']})
        with assert_raises(InvalidRSEExpression):
            parse_expression(tst, filter={'vo': self.new_vo['vo']})
        assert_equal(len(rses_tst_3), 1)
        assert_equal(shr_id_tst, rses_tst_3[0]['id'])
        assert_equal(len(rses_tst_4), 1)
        assert_equal(tst, rses_tst_4[0]['rse'])
        assert_equal(len(rses_new_3), 1)
        assert_equal(shr_id_new, rses_new_3[0]['id'])
Example #8
class TestRSEExpressionParserClient(object):
    def __init__(self):
        self.rse1 = rse_name_generator()
        self.rse2 = rse_name_generator()
        self.rse3 = rse_name_generator()
        self.rse4 = rse_name_generator()
        self.rse5 = rse_name_generator()

        self.rse1_id = rse.add_rse(self.rse1)
        self.rse2_id = rse.add_rse(self.rse2)
        self.rse3_id = rse.add_rse(self.rse3)
        self.rse4_id = rse.add_rse(self.rse4)
        self.rse5_id = rse.add_rse(self.rse5)

        # Add Attributes
        self.attribute = attribute_name_generator()

        rse.add_rse_attribute(self.rse1_id, self.attribute, "at")
        rse.add_rse_attribute(self.rse2_id, self.attribute, "de")
        rse.add_rse_attribute(self.rse3_id, self.attribute, "fr")
        rse.add_rse_attribute(self.rse4_id, self.attribute, "uk")
        rse.add_rse_attribute(self.rse5_id, self.attribute, "us")

        # Add Tags
        self.tag1 = tag_generator()
        self.tag2 = tag_generator()
        rse.add_rse_attribute(self.rse1_id, self.tag1, True)
        rse.add_rse_attribute(self.rse2_id, self.tag1, True)
        rse.add_rse_attribute(self.rse3_id, self.tag1, True)
        rse.add_rse_attribute(self.rse4_id, self.tag2, True)
        rse.add_rse_attribute(self.rse5_id, self.tag2, True)

        self.rse_client = RSEClient()

    def test_complicated_expression(self):
        """ RSE_EXPRESSION_PARSER (CLIENT) Test some complicated expression"""
        rses = [
            item['rse'] for item in
            self.rse_client.list_rses("(((((%s))))|%s=us)&%s|(%s=at|%s=de)" %
                                      (self.tag1, self.attribute, self.tag2,
                                       self.attribute, self.attribute))
        ]
        assert_equal(sorted(rses), sorted([self.rse1, self.rse2, self.rse5]))

    def test_complicated_expression_1(self):
        """ RSE_EXPRESSION_PARSER (CORE) Test some complicated expression 1"""
        rses = [
            item['rse'] for item in self.rse_client.list_rses(
                "(%s|%s)\\%s|%s&%s" %
                (self.tag1, self.tag2, self.tag2, self.tag2, self.tag1))
        ]
        assert_equal(sorted(rses), sorted([self.rse1, self.rse2, self.rse3]))
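
The two expressions tested above combine tags and attribute values with the RSE-expression operators | (union), & (intersection), \ (complement) and key=value matching. A smaller hedged sketch against the same setup (TAG1, TAG2 and ATTR stand for the generated self.tag1, self.tag2 and self.attribute):

# client.list_rses('TAG1|TAG2')       -> union: all five RSEs
# client.list_rses('ATTR=at')         -> attribute match: rse1 only
# client.list_rses('TAG1&ATTR=de')    -> intersection: rse2 only
# client.list_rses('TAG1\ATTR=de')    -> complement: rse1 and rse3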
Example #9
class TestRSEExpressionParserClient(unittest.TestCase):

    def setUp(self):
        if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
            self.vo = {'vo': get_vo()}
        else:
            self.vo = {}

        self.rse1 = rse_name_generator()
        self.rse2 = rse_name_generator()
        self.rse3 = rse_name_generator()
        self.rse4 = rse_name_generator()
        self.rse5 = rse_name_generator()

        self.rse1_id = rse.add_rse(self.rse1, **self.vo)
        self.rse2_id = rse.add_rse(self.rse2, **self.vo)
        self.rse3_id = rse.add_rse(self.rse3, **self.vo)
        self.rse4_id = rse.add_rse(self.rse4, **self.vo)
        self.rse5_id = rse.add_rse(self.rse5, **self.vo)

        # Add Attributes
        self.attribute = attribute_name_generator()

        rse.add_rse_attribute(self.rse1_id, self.attribute, "at")
        rse.add_rse_attribute(self.rse2_id, self.attribute, "de")
        rse.add_rse_attribute(self.rse3_id, self.attribute, "fr")
        rse.add_rse_attribute(self.rse4_id, self.attribute, "uk")
        rse.add_rse_attribute(self.rse5_id, self.attribute, "us")

        # Add Tags
        self.tag1 = tag_generator()
        self.tag2 = tag_generator()
        rse.add_rse_attribute(self.rse1_id, self.tag1, True)
        rse.add_rse_attribute(self.rse2_id, self.tag1, True)
        rse.add_rse_attribute(self.rse3_id, self.tag1, True)
        rse.add_rse_attribute(self.rse4_id, self.tag2, True)
        rse.add_rse_attribute(self.rse5_id, self.tag2, True)

        self.rse_client = RSEClient()

    def test_complicated_expression(self):
        """ RSE_EXPRESSION_PARSER (CLIENT) Test some complicated expression"""
        rses = sorted([item['rse'] for item in self.rse_client.list_rses("(((((%s))))|%s=us)&%s|(%s=at|%s=de)" % (self.tag1, self.attribute, self.tag2, self.attribute, self.attribute))])
        expected = sorted([self.rse1, self.rse2, self.rse5])
        assert rses == expected

    def test_complicated_expression_1(self):
        """ RSE_EXPRESSION_PARSER (CORE) Test some complicated expression 1"""
        rses = sorted([item['rse'] for item in self.rse_client.list_rses("(%s|%s)\\%s|%s&%s" % (self.tag1, self.tag2, self.tag2, self.tag2, self.tag1))])
        expected = sorted([self.rse1, self.rse2, self.rse3])
        assert rses == expected
Example #10
    def setup(self):
        self.rse1 = rse_name_generator()
        self.rse2 = rse_name_generator()
        self.rse3 = rse_name_generator()
        self.rse4 = rse_name_generator()
        self.rse5 = rse_name_generator()

        self.rse1_id = rse.add_rse(self.rse1)
        self.rse2_id = rse.add_rse(self.rse2)
        self.rse3_id = rse.add_rse(self.rse3)
        self.rse4_id = rse.add_rse(self.rse4)
        self.rse5_id = rse.add_rse(self.rse5)

        # Add Attributes
        self.attribute = attribute_name_generator()

        rse.add_rse_attribute(self.rse1, self.attribute, "at")
        rse.add_rse_attribute(self.rse2, self.attribute, "de")
        rse.add_rse_attribute(self.rse3, self.attribute, "fr")
        rse.add_rse_attribute(self.rse4, self.attribute, "uk")
        rse.add_rse_attribute(self.rse5, self.attribute, "us")

        # Add Tags
        self.tag1 = tag_generator()
        self.tag2 = tag_generator()
        rse.add_rse_attribute(self.rse1, self.tag1, True)
        rse.add_rse_attribute(self.rse2, self.tag1, True)
        rse.add_rse_attribute(self.rse3, self.tag1, True)
        rse.add_rse_attribute(self.rse4, self.tag2, True)
        rse.add_rse_attribute(self.rse5, self.tag2, True)

        self.rse_client = RSEClient()
Example #11
    def setUp(self):
        if config_get_bool('common',
                           'multi_vo',
                           raise_exception=False,
                           default=False):
            self.vo = {
                'vo':
                config_get('client',
                           'vo',
                           raise_exception=False,
                           default='tst')
            }
        else:
            self.vo = {}

        self.rse1 = rse_name_generator()
        self.rse2 = rse_name_generator()
        self.rse3 = rse_name_generator()
        self.rse4 = rse_name_generator()
        self.rse5 = rse_name_generator()

        self.rse1_id = rse.add_rse(self.rse1, **self.vo)
        self.rse2_id = rse.add_rse(self.rse2, **self.vo)
        self.rse3_id = rse.add_rse(self.rse3, **self.vo)
        self.rse4_id = rse.add_rse(self.rse4, **self.vo)
        self.rse5_id = rse.add_rse(self.rse5, **self.vo)

        # Add Attributes
        self.attribute = attribute_name_generator()

        rse.add_rse_attribute(self.rse1_id, self.attribute, "at")
        rse.add_rse_attribute(self.rse2_id, self.attribute, "de")
        rse.add_rse_attribute(self.rse3_id, self.attribute, "fr")
        rse.add_rse_attribute(self.rse4_id, self.attribute, "uk")
        rse.add_rse_attribute(self.rse5_id, self.attribute, "us")

        # Add Tags
        self.tag1 = tag_generator()
        self.tag2 = tag_generator()
        rse.add_rse_attribute(self.rse1_id, self.tag1, True)
        rse.add_rse_attribute(self.rse2_id, self.tag1, True)
        rse.add_rse_attribute(self.rse3_id, self.tag1, True)
        rse.add_rse_attribute(self.rse4_id, self.tag2, True)
        rse.add_rse_attribute(self.rse5_id, self.tag2, True)

        self.rse_client = RSEClient()
Example #12
    def test_account_counters_at_different_vos(self):
        """ MULTI VO (CLIENT): Test that account counters from 2nd vo don't interfere """

        session = db_session.get_session()

        # add some RSEs to test create_counters_for_new_account
        rse_client = RSEClient()
        rse_str = ''.join(choice(ascii_uppercase) for x in range(10))
        tst_rse1 = 'TST1_%s' % rse_str
        new_rse1 = 'NEW1_%s' % rse_str
        rse_client.add_rse(tst_rse1)
        add_rse(new_rse1, 'root', **self.new_vo)

        # add an account - should have counters created for RSEs on the same VO
        usr_uuid = str(generate_uuid()).lower()[:16]
        new_acc_str = 'shr-%s' % usr_uuid
        new_acc = InternalAccount(new_acc_str, **self.new_vo)
        add_account(new_acc_str, 'USER', '*****@*****.**', 'root', **self.new_vo)

        query = session.query(models.AccountUsage.account, models.AccountUsage.rse_id).\
            distinct(models.AccountUsage.account, models.AccountUsage.rse_id).\
            filter_by(account=new_acc)
        acc_counters = list(query.all())

        assert_not_equal(0, len(acc_counters))
        for counter in acc_counters:
            rse_id = counter[1]
            vo = get_rse_vo(rse_id)
            assert_equal(vo, self.new_vo['vo'])

        # add an RSE - should have counters created for accounts on the same VO
        new_rse2 = 'NEW2_' + rse_str
        new_rse2_id = add_rse(new_rse2, 'root', **self.new_vo)

        query = session.query(models.AccountUsage.account, models.AccountUsage.rse_id).\
            distinct(models.AccountUsage.account, models.AccountUsage.rse_id).\
            filter_by(rse_id=new_rse2_id)
        rse_counters = list(query.all())

        assert_not_equal(0, len(rse_counters))
        for counter in rse_counters:
            account = counter[0]
            assert_equal(account.vo, self.new_vo['vo'])

        session.commit()
Example #13
class TestRSEExpressionParserClient():

    def setup(self):
        self.rse1 = rse_name_generator()
        self.rse2 = rse_name_generator()
        self.rse3 = rse_name_generator()
        self.rse4 = rse_name_generator()
        self.rse5 = rse_name_generator()

        self.rse1_id = rse.add_rse(self.rse1)
        self.rse2_id = rse.add_rse(self.rse2)
        self.rse3_id = rse.add_rse(self.rse3)
        self.rse4_id = rse.add_rse(self.rse4)
        self.rse5_id = rse.add_rse(self.rse5)

        # Add Attributes
        self.attribute = attribute_name_generator()

        rse.add_rse_attribute(self.rse1, self.attribute, "at")
        rse.add_rse_attribute(self.rse2, self.attribute, "de")
        rse.add_rse_attribute(self.rse3, self.attribute, "fr")
        rse.add_rse_attribute(self.rse4, self.attribute, "uk")
        rse.add_rse_attribute(self.rse5, self.attribute, "us")

        # Add Tags
        self.tag1 = tag_generator()
        self.tag2 = tag_generator()
        rse.add_rse_attribute(self.rse1, self.tag1, True)
        rse.add_rse_attribute(self.rse2, self.tag1, True)
        rse.add_rse_attribute(self.rse3, self.tag1, True)
        rse.add_rse_attribute(self.rse4, self.tag2, True)
        rse.add_rse_attribute(self.rse5, self.tag2, True)

        self.rse_client = RSEClient()

    def test_complicated_expression(self):
        """ RSE_EXPRESSION_PARSER (CLIENT) Test some complicated expression"""
        rses = [item['rse'] for item in self.rse_client.list_rses("(((((%s))))|%s=us)&%s|(%s=at|%s=de)" % (self.tag1, self.attribute, self.tag2, self.attribute, self.attribute))]
        assert_equal(sorted(rses), sorted([self.rse1, self.rse2, self.rse5]))

    def test_complicated_expression_1(self):
        """ RSE_EXPRESSION_PARSER (CORE) Test some complicated expression 1"""
        rses = [item['rse'] for item in self.rse_client.list_rses("(%s|%s)\\%s|%s&%s" % (self.tag1, self.tag2, self.tag2, self.tag2, self.tag1))]
        assert_equal(sorted(rses), sorted([self.rse1, self.rse2, self.rse3]))
Example #14
    def setupClass(self):
        if config_get_bool('common',
                           'multi_vo',
                           raise_exception=False,
                           default=False):
            self.vo = {
                'vo':
                config_get('client',
                           'vo',
                           raise_exception=False,
                           default='tst')
            }
        else:
            self.vo = {}

        self.rse_client = RSEClient()
        self.tmp_rse_name = rse_name_generator()
        self.rse_client.add_rse(self.tmp_rse_name, vo=self.vo)
        self.tmp_rse = self.rse_client.get_rse(self.tmp_rse_name)['id']
Example #15
    def setUpClass(cls):
        if config_get_bool('common',
                           'multi_vo',
                           raise_exception=False,
                           default=False):
            cls.vo = {'vo': get_vo()}
        else:
            cls.vo = {}

        cls.rse_client = RSEClient()
        cls.tmp_rse_name = rse_name_generator()
        cls.rse_client.add_rse(cls.tmp_rse_name, vo=cls.vo)
        cls.tmp_rse = cls.rse_client.get_rse(cls.tmp_rse_name)['id']
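
In unittest, setUpClass is expected to be a classmethod; a minimal hedged sketch of the surrounding boilerplate (the class name is an assumption):

import unittest

class TestTmpRSE(unittest.TestCase):  # hypothetical class name
    @classmethod
    def setUpClass(cls):
        # body as shown in the snippet above
        pass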
Example #16
    def test_automatix(self):
        """ MULTI VO (DAEMON): Test that automatix runs on a single VO """
        scope_client = ScopeClient()
        scope_uuid = str(generate_uuid()).lower()[:16]
        shr_scope = 'shr_%s' % scope_uuid
        scope_client.add_scope('root', shr_scope)
        add_scope(shr_scope, 'root', 'root', **self.new_vo)

        rse_client = RSEClient()
        rse_str = ''.join(choice(ascii_uppercase) for x in range(10))
        shr_rse = 'SHR_%s' % rse_str
        mock_protocol = {'scheme': 'MOCK',
                         'hostname': 'localhost',
                         'port': 123,
                         'prefix': '/test/automatix',
                         'impl': 'rucio.rse.protocols.mock.Default',
                         'domains': {
                             'lan': {'read': 1,
                                     'write': 1,
                                     'delete': 1},
                             'wan': {'read': 1,
                                     'write': 1,
                                     'delete': 1}}}
        rse_client.add_rse(shr_rse)
        rse_client.add_rse_attribute(rse=shr_rse, key='verify_checksum', value=False)
        rse_client.add_protocol(shr_rse, mock_protocol)
        add_rse(shr_rse, 'root', **self.new_vo)
        add_rse_attribute(rse=shr_rse, key='verify_checksum', value=False, issuer='root', **self.new_vo)
        add_protocol(rse=shr_rse, data=mock_protocol, issuer='root', **self.new_vo)

        automatix(sites=[shr_rse], inputfile='/opt/rucio/etc/automatix.json', sleep_time=30, account='root', once=True, scope=shr_scope)

        did_list_tst = list(DIDClient().list_dids(shr_scope, {}))
        did_list_new = list(list_dids(shr_scope, {}, **self.new_vo))
        assert_not_equal(len(did_list_tst), 0)
        assert_equal(len(did_list_new), 0)

        did_dicts = [{'scope': shr_scope, 'name': n} for n in did_list_tst]
        replicas_tst = list(ReplicaClient().list_replicas(did_dicts, rse_expression=shr_rse))
        replicas_new = list(list_replicas(did_dicts, rse_expression=shr_rse, **self.new_vo))
        assert_not_equal(len(replicas_tst), 0)
        assert_equal(len(replicas_new), 0)
Example #17
from rucio.client.rseclient import RSEClient
import uuid, sys, pprint

rse_name = sys.argv[1]
scheme = sys.argv[2]

c = RSEClient(account="ivm")
protocols = c.get_protocols(rse_name)

for proto in protocols:
    if (scheme == "-" or proto.get("scheme")
            == scheme) and "extended_attributes" in proto:
        print "Scheme:", proto["scheme"]
        print "TFC:"
        pprint.pprint(proto["extended_attributes"]["tfc"])
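
The script reads the RSE name and a scheme from the command line ('-' matches any scheme) and dumps the TFC stored in the protocol's extended attributes. A hypothetical invocation (the file name and RSE are assumptions):

# python dump_tfc.py SOME_RSE srm
# python dump_tfc.py SOME_RSE -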
Example #18
class TestDIDClients:

    def setup(self):
        self.account_client = AccountClient()
        self.scope_client = ScopeClient()
        self.meta_client = MetaClient()
        self.did_client = DIDClient()
        self.replica_client = ReplicaClient()
        self.rse_client = RSEClient()

    def test_list_dids(self):
        """ DATA IDENTIFIERS (CLIENT): List dids by pattern."""
        tmp_scope = scope_name_generator()
        tmp_files = []
        tmp_files.append('file_a_1%s' % generate_uuid())
        tmp_files.append('file_a_2%s' % generate_uuid())
        tmp_files.append('file_b_1%s' % generate_uuid())
        tmp_rse = 'MOCK'

        self.scope_client.add_scope('jdoe', tmp_scope)
        for tmp_file in tmp_files:
            self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')

        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file_a_*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 2)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file_a_1*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 1)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file_*_1*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 2)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 3)
        results = []

        filters = {'name': 'file*', 'created_after': datetime.utcnow() - timedelta(hours=1)}
        for result in self.did_client.list_dids(tmp_scope, filters):
            results.append(result)
        assert_equal(len(results), 0)
        with assert_raises(UnsupportedOperation):
            self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='whateverytype')

    def test_list_recursive(self):
        """ DATA IDENTIFIERS (CLIENT): List did recursive """
        # Create nested containers and datasets
        tmp_scope_1 = 'list-did-recursive'
        tmp_scope_2 = 'list-did-recursive-2'
        self.scope_client.add_scope('root', tmp_scope_1)
        self.scope_client.add_scope('root', tmp_scope_2)

        tmp_container_1 = 'container_%s' % generate_uuid()
        self.did_client.add_container(scope=tmp_scope_1, name=tmp_container_1)

        tmp_container_2 = 'container_%s' % generate_uuid()
        self.did_client.add_container(scope=tmp_scope_1, name=tmp_container_2)

        tmp_dataset_1 = 'dataset_%s' % generate_uuid()
        self.did_client.add_dataset(scope=tmp_scope_2, name=tmp_dataset_1)

        tmp_dataset_2 = 'dataset_%s' % generate_uuid()
        self.did_client.add_dataset(scope=tmp_scope_1, name=tmp_dataset_2)

        self.did_client.attach_dids(scope=tmp_scope_1, name=tmp_container_1, dids=[{'scope': tmp_scope_2, 'name': tmp_dataset_1}])
        self.did_client.attach_dids(scope=tmp_scope_1, name=tmp_container_2, dids=[{'scope': tmp_scope_1, 'name': tmp_dataset_2}])
        self.did_client.attach_dids(scope=tmp_scope_1, name=tmp_container_1, dids=[{'scope': tmp_scope_1, 'name': tmp_container_2}])

        # List DIDs not recursive - only the first container is expected
        dids = [str(did) for did in self.did_client.list_dids(scope=tmp_scope_1, recursive=False, type='all', filters={'name': tmp_container_1})]
        assert_equal(dids, [tmp_container_1])

        # List DIDs recursive - first container and all attached collections are expected
        dids = [str(did) for did in self.did_client.list_dids(scope=tmp_scope_1, recursive=True, type='all', filters={'name': tmp_container_1})]
        assert_true(tmp_container_1 in dids)
        assert_true(tmp_container_2 in dids)
        assert_true(tmp_dataset_1 in dids)
        assert_true(tmp_dataset_2 in dids)
        assert_equal(len(dids), 4)

        # List DIDs recursive - only containers are expected
        dids = [str(did) for did in self.did_client.list_dids(scope=tmp_scope_1, recursive=True, type='container', filters={'name': tmp_container_1})]
        assert_true(tmp_container_1 in dids)
        assert_true(tmp_container_2 in dids)
        assert_true(tmp_dataset_1 not in dids)
        assert_true(tmp_dataset_2 not in dids)
        assert_equal(len(dids), 2)

    def test_list_by_length(self):
        """ DATA IDENTIFIERS (CLIENT): List did with length """
        tmp_scope = 'mock'

        tmp_dsn = 'dsn_%s' % generate_uuid()
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn)

        dids = self.did_client.list_dids(tmp_scope, {'length.gt': 0})
        results = []
        for d in dids:
            results.append(d)
        assert_not_equal(len(results), 0)

        dids = self.did_client.list_dids(tmp_scope, {'length.gt': -1, 'length.lt': 1})
        results = []
        for d in dids:
            results.append(d)
        assert_equal(len(results), 0)

        dids = self.did_client.list_dids(tmp_scope, {'length': 0})
        results = []
        for d in dids:
            results.append(d)
        assert_equal(len(results), 0)

    def test_list_by_metadata(self):
        """ DATA IDENTIFIERS (CLIENT): List did with metadata"""
        dsns = []
        tmp_scope = 'mock'
        tmp_dsn1 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn1)

        dataset_meta = {'project': 'data12_8TeV',
                        'run_number': 400000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m920',
                        }
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn1, meta=dataset_meta)
        tmp_dsn2 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn2)
        dataset_meta['run_number'] = 400001
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn2, meta=dataset_meta)

        tmp_dsn3 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn3)
        dataset_meta['stream_name'] = 'physics_Egamma'
        dataset_meta['datatype'] = 'NTUP_SMWZ'
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn3, meta=dataset_meta)

        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'version': 'f392_m920'})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert_in(dsn, results)
        dsns.remove(tmp_dsn1)

        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'run_number': 400001})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert_in(dsn, results)
        dsns.remove(tmp_dsn2)

        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'stream_name': 'physics_Egamma', 'datatype': 'NTUP_SMWZ'})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert_in(dsn, results)

        with assert_raises(KeyNotFound):
            self.did_client.list_dids(tmp_scope, {'NotReallyAKey': 'NotReallyAValue'})

    def test_add_did(self):
        """ DATA IDENTIFIERS (CLIENT): Add, populate, list did content and create a sample"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        tmp_dsn = 'dsn_%s' % generate_uuid()
        root = InternalAccount('root')
        set_local_account_limit(root, get_rse_id('MOCK'), -1)
        set_local_account_limit(root, get_rse_id('CERN-PROD_TZERO'), -1)

        # PFN example: rfio://castoratlas.cern.ch/castor/cern.ch/grid/atlas/tzero/xx/xx/xx/filename
        dataset_meta = {'project': 'data13_hip',
                        'run_number': 300000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m927',
                        }
        rules = [{'copies': 1, 'rse_expression': 'MOCK', 'account': 'root'}]

        with assert_raises(ScopeNotFound):
            self.did_client.add_dataset(scope='Nimportnawak', name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules)

        files = [{'scope': tmp_scope, 'name': 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid()), 'bytes': 724963570, 'adler32': '0cc737eb'}, ]
        with assert_raises(DataIdentifierNotFound):
            self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules, files=files)

        with assert_raises(DataIdentifierNotFound):
            self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files)

        files = []
        for i in range(5):
            lfn = 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid())
            pfn = 'mock://localhost/tmp/rucio_rse/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            # it doesn't work with mock: TBF
            # pfn = 'srm://mock2.com:2880/pnfs/rucio/disk-only/scratchdisk/rucio_tests/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            pfn += '%(tmp_dsn)s/%(lfn)s' % locals()
            file_meta = {'guid': str(generate_uuid()), 'events': 10}
            files.append({'scope': tmp_scope, 'name': lfn,
                          'bytes': 724963570, 'adler32': '0cc737eb',
                          'pfn': pfn, 'meta': file_meta})

        rules = [{'copies': 1, 'rse_expression': 'CERN-PROD_TZERO', 'lifetime': timedelta(days=2), 'account': 'root'}]

        with assert_raises(InvalidPath):
            self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules, files=files, rse=tmp_rse)

        files_without_pfn = [{'scope': i['scope'], 'name': i['name'], 'bytes': i['bytes'], 'adler32': i['adler32'], 'meta': i['meta']} for i in files]
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules, files=files_without_pfn, rse=tmp_rse)

        with assert_raises(DataIdentifierAlreadyExists):
            self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, files=files, rse=tmp_rse)

        files = []
        for i in range(5):
            lfn = '%(tmp_dsn)s.' % locals() + str(generate_uuid())
            pfn = 'mock://localhost/tmp/rucio_rse/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            # it doesn't work with mock: TBF
            # pfn = 'srm://mock2.com:2880/pnfs/rucio/disk-only/scratchdisk/rucio_tests/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            pfn += '%(tmp_dsn)s/%(lfn)s' % locals()
            file_meta = {'guid': str(generate_uuid()), 'events': 100}
            files.append({'scope': tmp_scope, 'name': lfn,
                          'bytes': 724963570, 'adler32': '0cc737eb',
                          'pfn': pfn, 'meta': file_meta})
        rules = [{'copies': 1, 'rse_expression': 'CERN-PROD_TZERO', 'lifetime': timedelta(days=2)}]

        with assert_raises(InvalidPath):
            self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files, rse=tmp_rse)
        files_without_pfn = [{'scope': i['scope'], 'name': i['name'], 'bytes': i['bytes'], 'adler32': i['adler32'], 'meta': i['meta']} for i in files]
        self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files_without_pfn, rse=tmp_rse)

        self.did_client.close(scope=tmp_scope, name=tmp_dsn)

        tmp_dsn_output = 'dsn_%s' % generate_uuid()
        self.did_client.create_did_sample(input_scope=tmp_scope, input_name=tmp_dsn, output_scope=tmp_scope, output_name=tmp_dsn_output, nbfiles=2)
        files = [f for f in self.did_client.list_files(scope=tmp_scope, name=tmp_dsn_output)]
        assert_equal(len(files), 2)

    def test_attach_dids_to_dids(self):
        """ DATA IDENTIFIERS (CLIENT): Attach dids to dids"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        nb_datasets = 5
        nb_files = 5
        attachments, dsns = list(), list()
        guid_to_query = None
        dsn = {}
        for i in range(nb_datasets):
            attachment = {}
            attachment['scope'] = tmp_scope
            attachment['name'] = 'dsn.%s' % str(generate_uuid())
            attachment['rse'] = tmp_rse
            files = []
            for i in range(nb_files):
                files.append({'scope': tmp_scope, 'name': 'lfn.%s' % str(generate_uuid()),
                              'bytes': 724963570, 'adler32': '0cc737eb',
                              'meta': {'guid': str(generate_uuid()), 'events': 100}})
            attachment['dids'] = files
            guid_to_query = files[0]['meta']['guid']
            dsn = {'scope': tmp_scope, 'name': attachment['name']}
            dsns.append(dsn)
            attachments.append(attachment)

        self.did_client.add_datasets(dsns=dsns)
        self.did_client.attach_dids_to_dids(attachments=attachments)
        dsns_l = [i for i in self.did_client.get_dataset_by_guid(guid_to_query)]

        assert_equal([dsn], dsns_l)

        cnt_name = 'cnt_%s' % generate_uuid()
        self.did_client.add_container(scope='mock', name=cnt_name)
        with assert_raises(UnsupportedOperation):
            self.did_client.attach_dids_to_dids([{'scope': 'mock', 'name': cnt_name, 'rse': tmp_rse, 'dids': attachment['dids']}])

    def test_add_files_to_datasets(self):
        """ DATA IDENTIFIERS (CLIENT): Add files to Datasets"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        dsn1 = 'dsn.%s' % str(generate_uuid())
        dsn2 = 'dsn.%s' % str(generate_uuid())
        meta = {'transient': True}
        files1, files2, nb_files = [], [], 5
        for i in range(nb_files):
            files1.append({'scope': tmp_scope, 'name': 'lfn.%s' % str(generate_uuid()),
                           'bytes': 724963570, 'adler32': '0cc737eb',
                           'meta': {'guid': str(generate_uuid()), 'events': 100}})
            files2.append({'scope': tmp_scope, 'name': 'lfn.%s' % str(generate_uuid()),
                           'bytes': 724963570, 'adler32': '0cc737eb',
                           'meta': {'guid': str(generate_uuid()), 'events': 100}})

        self.did_client.add_dataset(scope=tmp_scope, name=dsn1, files=files1,
                                    rse=tmp_rse, meta=meta)
        self.did_client.add_dataset(scope=tmp_scope, name=dsn2, files=files2,
                                    rse=tmp_rse, meta=meta)

        attachments = [{'scope': tmp_scope, 'name': dsn1, 'dids': files2, 'rse': tmp_rse},
                       {'scope': tmp_scope, 'name': dsn2, 'dids': files1, 'rse': tmp_rse}]

        self.did_client.add_files_to_datasets(attachments)

        files = [f for f in self.did_client.list_files(scope=tmp_scope, name=dsn1)]
        assert_equal(len(files), 10)

        with assert_raises(FileAlreadyExists):
            self.did_client.add_files_to_datasets(attachments)

        for attachment in attachments:
            for i in range(nb_files):
                attachment['dids'].append({'scope': tmp_scope,
                                           'name': 'lfn.%s' % str(generate_uuid()),
                                           'bytes': 724963570,
                                           'adler32': '0cc737eb',
                                           'meta': {'guid': str(generate_uuid()),
                                                    'events': 100}})

        self.did_client.add_files_to_datasets(attachments, ignore_duplicate=True)

        files = [f for f in self.did_client.list_files(scope=tmp_scope, name=dsn1)]
        assert_equal(len(files), 15)

        # Corrupt meta-data
        files = []
        for attachment in attachments:
            for file in attachment['dids']:
                file['bytes'] = 1000
                break

        with assert_raises(FileConsistencyMismatch):
            self.did_client.add_files_to_datasets(attachments, ignore_duplicate=True)

    def test_add_dataset(self):
        """ DATA IDENTIFIERS (CLIENT): Add dataset """
        tmp_scope = 'mock'
        tmp_dsn = 'dsn_%s' % generate_uuid()

        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, meta={'project': 'data13_hip'})

        did = self.did_client.get_did(tmp_scope, tmp_dsn)

        assert_equal(did['scope'], tmp_scope)
        assert_equal(did['name'], tmp_dsn)

        with assert_raises(DataIdentifierNotFound):
            self.did_client.get_did('i_dont_exist', 'neither_do_i')

    def test_add_datasets(self):
        """ DATA IDENTIFIERS (CLIENT): Bulk add datasets """
        tmp_scope = 'mock'
        dsns = list()
        for i in range(500):
            tmp_dsn = {'name': 'dsn_%s' % generate_uuid(), 'scope': tmp_scope, 'meta': {'project': 'data13_hip'}}
            dsns.append(tmp_dsn)
        self.did_client.add_datasets(dsns)

    def test_exists(self):
        """ DATA IDENTIFIERS (CLIENT): Check if data identifier exists """
        tmp_scope = 'mock'
        tmp_file = 'file_%s' % generate_uuid()
        tmp_rse = 'MOCK'

        self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')

        did = self.did_client.get_did(tmp_scope, tmp_file)

        assert_equal(did['scope'], tmp_scope)
        assert_equal(did['name'], tmp_file)

        with assert_raises(DataIdentifierNotFound):
            self.did_client.get_did('i_dont_exist', 'neither_do_i')

    def test_did_hierarchy(self):
        """ DATA IDENTIFIERS (CLIENT): Check did hierarchy rule """

        account = 'jdoe'
        rse = 'MOCK'
        scope = scope_name_generator()
        file = ['file_%s' % generate_uuid() for i in range(10)]
        dst = ['dst_%s' % generate_uuid() for i in range(4)]
        cnt = ['cnt_%s' % generate_uuid() for i in range(4)]

        self.scope_client.add_scope(account, scope)

        for i in range(10):
            self.replica_client.add_replica(rse, scope, file[i], 1, '0cc737eb')
        for i in range(4):
            self.did_client.add_did(scope, dst[i], 'DATASET', statuses=None, meta=None, rules=None)
        for i in range(4):
            self.did_client.add_did(scope, cnt[i], 'CONTAINER', statuses=None, meta=None, rules=None)

        for i in range(4):
            self.did_client.add_files_to_dataset(scope, dst[i], [{'scope': scope, 'name': file[2 * i], 'bytes': 1, 'adler32': '0cc737eb'},
                                                                 {'scope': scope, 'name': file[2 * i + 1], 'bytes': 1, 'adler32': '0cc737eb'}])

        self.did_client.add_containers_to_container(scope, cnt[1], [{'scope': scope, 'name': cnt[2]}, {'scope': scope, 'name': cnt[3]}])
        self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': dst[2]}])

        result = self.did_client.scope_list(scope, recursive=True)
        for r in result:
            pass
            # TODO: fix, fix, fix
            # if r['name'] == cnt[1]:
            #    assert_equal(r['type'], 'container')
            #    assert_equal(r['level'], 0)
            # if (r['name'] == cnt[0]) or (r['name'] == dst[0]) or (r['name'] == file[8]) or (r['name'] == file[9]):
            #    assert_equal(r['level'], 0)
            # else:
            #     assert_equal(r['level'], 1)

    def test_detach_did(self):
        """ DATA IDENTIFIERS (CLIENT): Detach dids from a did"""

        account = 'jdoe'
        rse = 'MOCK'
        scope = scope_name_generator()
        file = ['file_%s' % generate_uuid() for i in range(10)]
        dst = ['dst_%s' % generate_uuid() for i in range(5)]
        cnt = ['cnt_%s' % generate_uuid() for i in range(2)]

        self.scope_client.add_scope(account, scope)

        for i in range(10):
            self.replica_client.add_replica(rse, scope, file[i], 1, '0cc737eb')
        for i in range(5):
            self.did_client.add_dataset(scope, dst[i], statuses=None, meta=None, rules=None)
        for i in range(2):
            self.did_client.add_container(scope, cnt[i], statuses=None, meta=None, rules=None)

        for i in range(5):
            self.did_client.add_files_to_dataset(scope, dst[i], [{'scope': scope, 'name': file[2 * i], 'bytes': 1, 'adler32': '0cc737eb'},
                                                                 {'scope': scope, 'name': file[2 * i + 1], 'bytes': 1, 'adler32': '0cc737eb'}])

        self.did_client.add_containers_to_container(scope, cnt[1], [{'scope': scope, 'name': dst[2]}, {'scope': scope, 'name': dst[3]}])

        with assert_raises(UnsupportedOperation):
            self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': cnt[1]}])

        self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': dst[2]}])

        self.did_client.detach_dids(scope, cnt[0], [{'scope': scope, 'name': dst[1]}])
        self.did_client.detach_dids(scope, dst[3], [{'scope': scope, 'name': file[6]}, {'scope': scope, 'name': file[7]}])
        result = self.did_client.scope_list(scope, recursive=True)
        for r in result:
            if r['name'] == dst[1]:
                assert_equal(r['level'], 0)
            if r['type'] == 'file':
                if (r['name'] in file[6:9]):
                    assert_equal(r['level'], 0)
                else:
                    assert_not_equal(r['level'], 0)

        with assert_raises(UnsupportedOperation):
            self.did_client.detach_dids(scope=scope, name=cnt[0], dids=[{'scope': scope, 'name': cnt[0]}])

        self.did_client.close(scope, dst[4])
        metadata = self.did_client.get_metadata(scope, dst[4])
        i_bytes, i_length = metadata['bytes'], metadata['length']
        metadata = self.did_client.get_metadata(scope, file[8])
        file1_bytes = metadata['bytes']
        metadata = self.did_client.get_metadata(scope, file[9])
        file2_bytes = metadata['bytes']
        self.did_client.detach_dids(scope, dst[4], [{'scope': scope, 'name': file[8]}, {'scope': scope, 'name': file[9]}])
        metadata = self.did_client.get_metadata(scope, dst[4])
        f_bytes, f_length = metadata['bytes'], metadata['length']
        assert_equal(i_bytes, f_bytes + file1_bytes + file2_bytes)
        assert_equal(i_length, f_length + 1 + 1)

    def test_scope_list(self):
        """ DATA IDENTIFIERS (CLIENT): Add, aggregate, and list data identifiers in a scope """

        # create some dummy data
        self.tmp_accounts = ['jdoe' for i in range(3)]
        self.tmp_scopes = [scope_name_generator() for i in range(3)]
        self.tmp_rses = [rse_name_generator() for i in range(3)]
        self.tmp_files = ['file_%s' % generate_uuid() for i in range(3)]
        self.tmp_datasets = ['dataset_%s' % generate_uuid() for i in range(3)]
        self.tmp_containers = ['container_%s' % generate_uuid() for i in range(3)]

        # add dummy data to the catalogue
        for i in range(3):
            self.scope_client.add_scope(self.tmp_accounts[i], self.tmp_scopes[i])
            self.rse_client.add_rse(self.tmp_rses[i])
            self.replica_client.add_replica(self.tmp_rses[i], self.tmp_scopes[i], self.tmp_files[i], 1, '0cc737eb')

        # put files in datasets
        for i in range(3):
            for j in range(3):
                files = [{'scope': self.tmp_scopes[j], 'name': self.tmp_files[j], 'bytes': 1, 'adler32': '0cc737eb'}]
                self.did_client.add_dataset(self.tmp_scopes[i], self.tmp_datasets[j])
                self.did_client.add_files_to_dataset(self.tmp_scopes[i], self.tmp_datasets[j], files)

        # put datasets in containers
        for i in range(3):
            for j in range(3):
                datasets = [{'scope': self.tmp_scopes[j], 'name': self.tmp_datasets[j]}]
                self.did_client.add_container(self.tmp_scopes[i], self.tmp_containers[j])
                self.did_client.add_datasets_to_container(self.tmp_scopes[i], self.tmp_containers[j], datasets)

        # reverse check if everything is in order
        for i in range(3):
            result = self.did_client.scope_list(self.tmp_scopes[i], recursive=True)

            r_topdids = []
            r_otherscopedids = []
            r_scope = []
            for r in result:
                if r['level'] == 0:
                    r_topdids.append(r['scope'] + ':' + r['name'])
                    r_scope.append(r['scope'])
                if r['scope'] != self.tmp_scopes[i]:
                    r_otherscopedids.append(r['scope'] + ':' + r['name'])
                    assert_in(r['level'], [1, 2])

            for j in range(3):
                assert_equal(self.tmp_scopes[i], r_scope[j])
                if j != i:
                    assert_in(self.tmp_scopes[j] + ':' + self.tmp_files[j], r_otherscopedids)
            assert_not_in(self.tmp_scopes[i] + ':' + self.tmp_files[i], r_topdids)

    def test_get_did(self):
        """ DATA IDENTIFIERS (CLIENT): add a new data identifier and try to retrieve it back"""
        rse = 'MOCK'
        scope = 'mock'
        file = generate_uuid()
        dsn = generate_uuid()

        self.replica_client.add_replica(rse, scope, file, 1, '0cc737eb')

        did = self.did_client.get_did(scope, file)

        assert_equal(did['scope'], scope)
        assert_equal(did['name'], file)

        self.did_client.add_dataset(scope=scope, name=dsn, lifetime=10000000)
        did2 = self.did_client.get_did(scope, dsn)
        assert_equal(type(did2['expired_at']), datetime)

    def test_get_meta(self):
        """ DATA IDENTIFIERS (CLIENT): add a new meta data for an identifier and try to retrieve it back"""
        rse = 'MOCK'
        scope = 'mock'
        file = generate_uuid()
        keys = ['project', 'run_number']
        values = ['data13_hip', 12345678]

        self.replica_client.add_replica(rse, scope, file, 1, '0cc737eb')
        for i in range(2):
            self.did_client.set_metadata(scope, file, keys[i], values[i])

        meta = self.did_client.get_metadata(scope, file)

        for i in range(2):
            assert_equal(meta[keys[i]], values[i])

    def test_list_content(self):
        """ DATA IDENTIFIERS (CLIENT): test to list contents for an identifier"""
        rse = 'MOCK'
        scope = 'mock'
        nbfiles = 5
        dataset1 = generate_uuid()
        dataset2 = generate_uuid()
        container = generate_uuid()
        files1 = [{'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'} for i in range(nbfiles)]
        files2 = [{'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'} for i in range(nbfiles)]

        self.did_client.add_dataset(scope, dataset1)

        with assert_raises(DataIdentifierAlreadyExists):
            self.did_client.add_dataset(scope, dataset1)

        self.did_client.add_files_to_dataset(scope, dataset1, files1, rse=rse)

        self.did_client.add_dataset(scope, dataset2)
        self.did_client.add_files_to_dataset(scope, dataset2, files2, rse=rse)

        self.did_client.add_container(scope, container)
        datasets = [{'scope': scope, 'name': dataset1}, {'scope': scope, 'name': dataset2}]
        self.did_client.add_datasets_to_container(scope, container, datasets)

        contents = self.did_client.list_content(scope, container)

        datasets_s = [d['name'] for d in contents]
        assert_in(dataset1, datasets_s)
        assert_in(dataset2, datasets_s)

    def test_list_files(self):
        """ DATA IDENTIFIERS (CLIENT): List files for a container"""
        rse = 'MOCK'
        scope = 'mock'
        dataset1 = generate_uuid()
        dataset2 = generate_uuid()
        container = generate_uuid()
        files1 = []
        files2 = []
        for i in range(10):
            files1.append({'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'})
            files2.append({'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'})

        for i in range(10):
            self.replica_client.add_replica(rse, scope, files1[i]['name'], 1, '0cc737eb')
            self.replica_client.add_replica(rse, scope, files2[i]['name'], 1, '0cc737eb')

        self.did_client.add_dataset(scope, dataset1)
        self.did_client.add_files_to_dataset(scope, dataset1, files1)

        self.did_client.add_dataset(scope, dataset2)
        self.did_client.add_files_to_dataset(scope, dataset2, files2)
        datasets = [{'scope': scope, 'name': dataset1}, {'scope': scope, 'name': dataset2}]
        self.did_client.add_container(scope, container)
        self.did_client.add_datasets_to_container(scope, container, datasets)

        # List file content
        content = self.did_client.list_files(scope, files1[i]['name'])
        assert_true(content is not None)
        for d in content:
            assert_true(d['name'] == files1[i]['name'])

        # List container content
        for d in [{'name': x['name'], 'scope': x['scope'], 'bytes': x['bytes'], 'adler32': x['adler32']} for x in self.did_client.list_files(scope, container)]:
            assert_in(d, files1 + files2)

        # List non-existing data identifier content
        with assert_raises(DataIdentifierNotFound):
            self.did_client.list_files(scope, 'Nimportnawak')

    def test_list_replicas(self):
        """ DATA IDENTIFIERS (CLIENT): List replicas for a container"""
        rse = 'MOCK'
        scope = 'mock'
        dsn1 = generate_uuid()
        dsn2 = generate_uuid()
        cnt = generate_uuid()
        files1 = []
        files2 = []
        for i in range(10):
            files1.append({'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'})
            files2.append({'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'})

        self.did_client.add_dataset(scope, dsn1)
        self.did_client.add_files_to_dataset(scope, dsn1, files1, rse=rse)

        self.did_client.add_dataset(scope, dsn2)
        self.did_client.add_files_to_dataset(scope, dsn2, files2, rse=rse)

        self.did_client.add_container(scope, cnt)
        self.did_client.add_datasets_to_container(scope, cnt, [{'scope': scope, 'name': dsn1}, {'scope': scope, 'name': dsn2}])

        replicas = self.replica_client.list_replicas(dids=[{'scope': scope, 'name': dsn1}])
        assert_true(replicas is not None)

        replicas = self.replica_client.list_replicas(dids=[{'scope': scope, 'name': cnt}])
        assert_true(replicas is not None)

    @raises(UnsupportedOperation)
    def test_close(self):
        """ DATA IDENTIFIERS (CLIENT): test to close data identifiers"""

        tmp_rse = 'MOCK'
        tmp_scope = 'mock'

        # Add dataset
        tmp_dataset = 'dsn_%s' % generate_uuid()

        # Add file replica
        tmp_file = 'file_%s' % generate_uuid()
        self.replica_client.add_replica(rse=tmp_rse, scope=tmp_scope, name=tmp_file, bytes=1, adler32='0cc737eb')

        # Add dataset
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dataset)

        # Add files to dataset
        files = [{'scope': tmp_scope, 'name': tmp_file, 'bytes': 1, 'adler32': '0cc737eb'}, ]
        self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dataset, files=files)

        # Add a second file replica
        tmp_file = 'file_%s' % generate_uuid()
        self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')
        # Add files to dataset
        files = [{'scope': tmp_scope, 'name': tmp_file, 'bytes': 1, 'adler32': '0cc737eb'}, ]
        self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dataset, files=files)

        # Close dataset
        with assert_raises(UnsupportedStatus):
            self.did_client.set_status(scope=tmp_scope, name=tmp_dataset, close=False)
        self.did_client.set_status(scope=tmp_scope, name=tmp_dataset, open=False)

        # Add a third file replica
        tmp_file = 'file_%s' % generate_uuid()
        self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')
        # Add files to dataset
        files = [{'scope': tmp_scope, 'name': tmp_file, 'bytes': 1, 'adler32': '0cc737eb'}, ]
        self.did_client.attach_dids(scope=tmp_scope, name=tmp_dataset, dids=files)

    @raises
    def test_open(self):
        """ DATA IDENTIFIERS (CLIENT): test to re-open data identifiers for priv account"""

        tmp_rse = 'MOCK'
        tmp_scope = 'mock'

        # Add dataset
        tmp_dataset = 'dsn_%s' % generate_uuid()

        # Add file replica
        tmp_file = 'file_%s' % generate_uuid()
        self.replica_client.add_replica(rse=tmp_rse, scope=tmp_scope, name=tmp_file, bytes=1, adler32='0cc737eb')

        # Add dataset
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dataset)

        # Add files to dataset
        files = [{'scope': tmp_scope, 'name': tmp_file, 'bytes': 1, 'adler32': '0cc737eb'}, ]
        self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dataset, files=files)

        # Add a second file replica
        tmp_file = 'file_%s' % generate_uuid()
        self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')
        # Add files to dataset
        files = [{'scope': tmp_scope, 'name': tmp_file, 'bytes': 1, 'adler32': '0cc737eb'}, ]
        self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dataset, files=files)

        # Close dataset
        with assert_raises(UnsupportedStatus):
            self.did_client.set_status(scope=tmp_scope, name=tmp_dataset, close=False)
        self.did_client.set_status(scope=tmp_scope, name=tmp_dataset, open=False)

        # Add a third file replica
        self.did_client.set_status(scope=tmp_scope, name=tmp_dataset, open=True)

    def test_bulk_get_meta(self):
        """ DATA IDENTIFIERS (CLIENT): Add a new meta data for a list of DIDs and try to retrieve them back"""
        key = 'project'
        rse = 'MOCK'
        scope = 'mock'
        files = ['file_%s' % generate_uuid() for _ in range(4)]
        dst = ['dst_%s' % generate_uuid() for _ in range(4)]
        cnt = ['cnt_%s' % generate_uuid() for _ in range(4)]
        meta_mapping = {}
        list_dids = []
        for idx in range(4):
            self.replica_client.add_replica(rse, scope, files[idx], 1, '0cc737eb')
            self.did_client.set_metadata(scope, files[idx], key, 'file_%s' % idx)
            list_dids.append({'scope': scope, 'name': files[idx]})
            meta_mapping['%s:%s' % (scope, files[idx])] = (key, 'file_%s' % idx)
        for idx in range(4):
            self.did_client.add_did(scope, dst[idx], 'DATASET', statuses=None, meta={key: 'dsn_%s' % idx}, rules=None)
            list_dids.append({'scope': scope, 'name': dst[idx]})
            meta_mapping['%s:%s' % (scope, dst[idx])] = (key, 'dsn_%s' % idx)
        for idx in range(4):
            self.did_client.add_did(scope, cnt[idx], 'CONTAINER', statuses=None, meta={key: 'cnt_%s' % idx}, rules=None)
            list_dids.append({'scope': scope, 'name': cnt[idx]})
            meta_mapping['%s:%s' % (scope, cnt[idx])] = (key, 'cnt_%s' % idx)
        list_meta = list(self.did_client.get_metadata_bulk(list_dids))
        res_list_dids = [{'scope': entry['scope'], 'name': entry['name']} for entry in list_meta]
        # Plain .sort() cannot compare dicts, so sort both lists by (scope, name)
        res_list_dids.sort(key=lambda did: (did['scope'], did['name']))
        list_dids.sort(key=lambda did: (did['scope'], did['name']))
        assert_equal(list_dids, res_list_dids)
        for meta in list_meta:
            did = '%s:%s' % (meta['scope'], meta['name'])
            met = meta_mapping[did]
            assert_equal((key, meta[key]), met)
        cnt = ['cnt_%s' % generate_uuid() for _ in range(4)]
        for idx in range(4):
            list_dids.append({'scope': scope, 'name': cnt[idx]})
        list_meta = list(self.did_client.get_metadata_bulk(list_dids))
        assert_equal(len(list_meta), 12)
        list_dids = []
        for idx in range(4):
            list_dids.append({'scope': scope, 'name': cnt[idx]})
        list_meta = list(self.did_client.get_metadata_bulk(list_dids))
        assert_equal(len(list_meta), 0)
Exemplo n.º 20
0
import json
import logging
from rucio.client.rseclient import RSEClient
from rucio.client.didclient import DIDClient
from rucio.client.replicaclient import ReplicaClient
from kafka import KafkaProducer
from datetime import datetime

rucio_account = 'navila'
# DID Client
didclient = DIDClient(account=rucio_account)

# Replica Client
replicaclient = ReplicaClient(account=rucio_account)

# RSE Client
rseclient = RSEClient(account=rucio_account)
rselist = rseclient.list_rses()

# kafka scripts
logger = logging.getLogger(__name__)
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s [%(levelname)s] %(name)s - %(message)s')


def vs(doc):
    return json.dumps(doc).encode(encoding='utf-8', errors='replace')


kafka_bootstrap_servers = [
    'lssrv03.fnal.gov:9092', 'lssrv04.fnal.gov:9092', 'lssrv05.fnal.gov:9092'
Exemplo n.º 21
0
    def setUp(self):
        """ Setup the Test Case """
        self.did_client = DIDClient()
        self.meta_client = MetaClient()
        self.rse_client = RSEClient()
        self.scope_client = ScopeClient()
Exemplo n.º 22
0
from collections import namedtuple

import rucio
from rucio.client import Client
from rucio.client.didclient import DIDClient
from rucio.client.replicaclient import ReplicaClient
from rucio.client.rseclient import RSEClient
from rucio.client.ruleclient import RuleClient
from rucio.client.accountclient import AccountClient

from rucio.common import exception

DID = namedtuple('DID', ['scope', 'name', 'ftype', 'owner', 'path', 'dataset'])

did_client = DIDClient()
rep_client = ReplicaClient()
rse_client = RSEClient()
rule_client = RuleClient()
admin_client = AccountClient()


def list_uk_localgroupdisks():
    """Return a list of known uk localgroup disks
    """
    global rse_client
    r = rse_client.list_rses(
        rse_expression='spacetoken=ATLASLOCALGROUPDISK&cloud=UK')
    return sorted(x['rse'] for x in r)


def get_rse_usage(rse, filters=None):
    return rse_client.get_rse_usage(rse, filters=filters)
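
The two helpers above lend themselves to a small usage report: list the UK LOCALGROUPDISK endpoints, then print every usage record Rucio holds for each of them. The sketch below is illustrative only; it assumes the caller is authorised to query RSE usage and that the records carry the usual 'source' and 'used' keys.

def print_uk_localgroupdisk_usage():
    """Illustrative sketch: report storage usage for every UK LOCALGROUPDISK RSE."""
    for rse in list_uk_localgroupdisks():
        # get_rse_usage yields one record per usage source (e.g. 'rucio')
        for usage in get_rse_usage(rse):
            print('%s [%s]: %s bytes used' % (rse, usage['source'], usage['used']))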
Exemplo n.º 23
0
class TestDIDClients:

    def setup(self):
        self.account_client = AccountClient()
        self.scope_client = ScopeClient()
        self.meta_client = MetaClient()
        self.did_client = DIDClient()
        self.replica_client = ReplicaClient()
        self.rse_client = RSEClient()

    def test_list_dids(self):
        """ DATA IDENTIFIERS (CLIENT): List dids by pattern."""
        tmp_scope = scope_name_generator()
        tmp_files = []
        tmp_files.append('file_a_1%s' % generate_uuid())
        tmp_files.append('file_a_2%s' % generate_uuid())
        tmp_files.append('file_b_1%s' % generate_uuid())
        tmp_rse = 'MOCK'

        self.scope_client.add_scope('jdoe', tmp_scope)
        for tmp_file in tmp_files:
            self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')

        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': r'file\_a\_*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 2)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': r'file\_a\_1*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 1)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': r'file\__\_1*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 2)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 3)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file*'}):
            results.append(result)
        assert_equal(len(results), 0)
        with assert_raises(UnsupportedOperation):
            self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='whateverytype')

    def test_list_by_metadata(self):
        """ DATA IDENTIFIERS (CLIENT): List did with metadata"""
        dsns = []
        tmp_scope = 'mock'
        tmp_dsn1 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn1)

        dataset_meta = {'project': 'data12_8TeV',
                        'run_number': 400000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m920',
                        }
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn1, meta=dataset_meta)
        tmp_dsn2 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn2)
        dataset_meta['run_number'] = 400001
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn2, meta=dataset_meta)

        tmp_dsn3 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn3)
        dataset_meta['stream_name'] = 'physics_Egamma'
        dataset_meta['datatype'] = 'NTUP_SMWZ'
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn3, meta=dataset_meta)

        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'version': 'f392_m920'})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert_in(dsn, results)
        dsns.remove(tmp_dsn1)

        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'run_number': 400001})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert_in(dsn, results)
        dsns.remove(tmp_dsn2)

        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'stream_name': 'physics_Egamma', 'datatype': 'NTUP_SMWZ'})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert_in(dsn, results)

        with assert_raises(KeyNotFound):
            self.did_client.list_dids(tmp_scope, {'NotReallyAKey': 'NotReallyAValue'})

    def test_add_did(self):
        """ DATA IDENTIFIERS (CLIENT): Add, populate and list did content"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        tmp_dsn = 'dsn_%s' % generate_uuid()

        # PFN example: rfio://castoratlas.cern.ch/castor/cern.ch/grid/atlas/tzero/xx/xx/xx/filename
        dataset_meta = {'project': 'data13_hip',
                        'run_number': 300000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m927',
                        }
        rules = [{'copies': 1, 'rse_expression': 'MOCK', 'account': 'root'}]

        with assert_raises(ScopeNotFound):
            self.did_client.add_dataset(scope='Nimportnawak', name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules)

        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules)

        with assert_raises(DataIdentifierNotFound):
            self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn,
                                                 files=[{'scope': tmp_scope, 'name': 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid()),
                                                         'bytes': 724963570, 'adler32': '0cc737eb'}])
        files = []
        for _ in range(5):
            lfn = 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid())
            pfn = 'mock://localhost/tmp/rucio_rse/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            # The srm PFN below does not work with the mock protocol; to be fixed
            # pfn = 'srm://mock2.com:2880/pnfs/rucio/disk-only/scratchdisk/rucio_tests/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            pfn += '%(tmp_dsn)s/%(lfn)s' % locals()
            file_meta = {'guid': str(generate_uuid()), 'events': 10}
            files.append({'scope': tmp_scope, 'name': lfn,
                          'bytes': 724963570, 'adler32': '0cc737eb',
                          'pfn': pfn, 'meta': file_meta})

        rules = [{'copies': 1, 'rse_expression': 'CERN-PROD_TZERO', 'lifetime': timedelta(days=2)}]

        self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files, rse=tmp_rse)

        files = []
        for _ in range(5):
            lfn = '%(tmp_dsn)s.' % locals() + str(generate_uuid())
            pfn = 'mock://localhost/tmp/rucio_rse/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            # The srm PFN below does not work with the mock protocol; to be fixed
            # pfn = 'srm://mock2.com:2880/pnfs/rucio/disk-only/scratchdisk/rucio_tests/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            pfn += '%(tmp_dsn)s/%(lfn)s' % locals()
            file_meta = {'guid': str(generate_uuid()), 'events': 100}
            files.append({'scope': tmp_scope, 'name': lfn,
                          'bytes': 724963570, 'adler32': '0cc737eb',
                          'pfn': pfn, 'meta': file_meta})
        rules = [{'copies': 1, 'rse_expression': 'CERN-PROD_TZERO', 'lifetime': timedelta(days=2)}]
        self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files, rse=tmp_rse)

        self.did_client.close(scope=tmp_scope, name=tmp_dsn)

    def test_attach_dids_to_dids(self):
        """ DATA IDENTIFIERS (CLIENT): Attach dids to dids"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        nb_datasets = 5
        nb_files = 5
        attachments, dsns = list(), list()
        guid_to_query = None
        dsn = {}
        for _ in range(nb_datasets):
            attachment = {}
            attachment['scope'] = tmp_scope
            attachment['name'] = 'dsn.%s' % str(generate_uuid())
            attachment['rse'] = tmp_rse
            files = []
            for _ in range(nb_files):
                files.append({'scope': tmp_scope, 'name': 'lfn.%s' % str(generate_uuid()),
                              'bytes': 724963570, 'adler32': '0cc737eb',
                              'meta': {'guid': str(generate_uuid()), 'events': 100}})
            attachment['dids'] = files
            guid_to_query = files[0]['meta']['guid']
            dsn = {'scope': tmp_scope, 'name': attachment['name']}
            dsns.append(dsn)
            attachments.append(attachment)

        self.did_client.add_datasets(dsns=dsns)
        self.did_client.attach_dids_to_dids(attachments=attachments)
        datasets_by_guid = list(self.did_client.get_dataset_by_guid(guid_to_query))

        assert_equal([dsn], datasets_by_guid)

        cnt_name = 'cnt_%s' % generate_uuid()
        self.did_client.add_container(scope='mock', name=cnt_name)
        with assert_raises(UnsupportedOperation):
            self.did_client.attach_dids_to_dids([{'scope': 'mock', 'name': cnt_name, 'rse': tmp_rse, 'dids': attachment['dids']}])

    def test_add_dataset(self):
        """ DATA IDENTIFIERS (CLIENT): Add dataset """
        tmp_scope = 'mock'
        tmp_dsn = 'dsn_%s' % generate_uuid()

        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, meta={'project': 'data13_hip'})

        did = self.did_client.get_did(tmp_scope, tmp_dsn)

        assert_equal(did['scope'], tmp_scope)
        assert_equal(did['name'], tmp_dsn)

        with assert_raises(DataIdentifierNotFound):
            self.did_client.get_did('i_dont_exist', 'neither_do_i')

    def test_add_datasets(self):
        """ DATA IDENTIFIERS (CLIENT): Bulk add datasets """
        tmp_scope = 'mock'
        dsns = list()
        for _ in range(500):
            tmp_dsn = {'name': 'dsn_%s' % generate_uuid(), 'scope': tmp_scope, 'meta': {'project': 'data13_hip'}}
            dsns.append(tmp_dsn)
        self.did_client.add_datasets(dsns)

    def test_exists(self):
        """ DATA IDENTIFIERS (CLIENT): Check if data identifier exists """
        tmp_scope = 'mock'
        tmp_file = 'file_%s' % generate_uuid()
        tmp_rse = 'MOCK'

        self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')

        did = self.did_client.get_did(tmp_scope, tmp_file)

        assert_equal(did['scope'], tmp_scope)
        assert_equal(did['name'], tmp_file)

        with assert_raises(DataIdentifierNotFound):
            self.did_client.get_did('i_dont_exist', 'neither_do_i')

    def test_did_hierarchy(self):
        """ DATA IDENTIFIERS (CLIENT): Check did hierarchy rule """

        account = 'jdoe'
        rse = 'MOCK'
        scope = scope_name_generator()
        file = ['file_%s' % generate_uuid() for i in range(10)]
        dst = ['dst_%s' % generate_uuid() for i in range(4)]
        cnt = ['cnt_%s' % generate_uuid() for i in range(4)]

        self.scope_client.add_scope(account, scope)

        for i in range(10):
            self.replica_client.add_replica(rse, scope, file[i], 1, '0cc737eb')
        for i in range(4):
            self.did_client.add_did(scope, dst[i], 'DATASET', statuses=None, meta=None, rules=None)
        for i in range(4):
            self.did_client.add_did(scope, cnt[i], 'CONTAINER', statuses=None, meta=None, rules=None)

        for i in range(4):
            self.did_client.add_files_to_dataset(scope, dst[i],
                                                 [{'scope': scope, 'name': file[2 * i], 'bytes': 1, 'adler32': '0cc737eb'},
                                                  {'scope': scope, 'name': file[2 * i + 1], 'bytes': 1, 'adler32': '0cc737eb'}])

        self.did_client.add_containers_to_container(scope, cnt[1], [{'scope': scope, 'name': cnt[2]}, {'scope': scope, 'name': cnt[3]}])
        self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': dst[2]}])

        result = self.did_client.scope_list(scope, recursive=True)
        for r in result:
            pass
            # TODO: fix, fix, fix
            # if r['name'] == cnt[1]:
            #    assert_equal(r['type'], 'container')
            #    assert_equal(r['level'], 0)
            # if (r['name'] == cnt[0]) or (r['name'] == dst[0]) or (r['name'] == file[8]) or (r['name'] == file[9]):
            #    assert_equal(r['level'], 0)
            # else:
            #     assert_equal(r['level'], 1)

    def test_detach_did(self):
        """ DATA IDENTIFIERS (CLIENT): Detach dids from a did"""

        account = 'jdoe'
        rse = 'MOCK'
        scope = scope_name_generator()
        file = ['file_%s' % generate_uuid() for i in range(10)]
        dst = ['dst_%s' % generate_uuid() for i in range(4)]
        cnt = ['cnt_%s' % generate_uuid() for i in range(2)]

        self.scope_client.add_scope(account, scope)

        for i in range(10):
            self.replica_client.add_replica(rse, scope, file[i], 1, '0cc737eb')
        for i in range(4):
            self.did_client.add_dataset(scope, dst[i], statuses=None, meta=None, rules=None)
        for i in range(2):
            self.did_client.add_container(scope, cnt[i], statuses=None, meta=None, rules=None)

        for i in range(4):
            self.did_client.add_files_to_dataset(scope, dst[i],
                                                 [{'scope': scope, 'name': file[2 * i], 'bytes': 1, 'adler32': '0cc737eb'},
                                                  {'scope': scope, 'name': file[2 * i + 1], 'bytes': 1, 'adler32': '0cc737eb'}])

        self.did_client.add_containers_to_container(scope, cnt[1], [{'scope': scope, 'name': dst[2]}, {'scope': scope, 'name': dst[3]}])

        with assert_raises(UnsupportedOperation):
            self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': cnt[1]}])

        self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': dst[2]}])

        self.did_client.detach_dids(scope, cnt[0], [{'scope': scope, 'name': dst[1]}])
        self.did_client.detach_dids(scope, dst[3], [{'scope': scope, 'name': file[6]}, {'scope': scope, 'name': file[7]}])
        result = self.did_client.scope_list(scope, recursive=True)
        for r in result:
            if r['name'] == dst[1]:
                assert_equal(r['level'], 0)
            if r['type'] == 'file':
                # Files 6 and 7 were just detached and files 8 and 9 were never attached,
                # so all of them should sit at the top level of the scope listing
                if r['name'] in file[6:]:
                    assert_equal(r['level'], 0)
                else:
                    assert_not_equal(r['level'], 0)

        with assert_raises(UnsupportedOperation):
            self.did_client.detach_dids(scope=scope, name=cnt[0], dids=[{'scope': scope, 'name': cnt[0]}])

    def test_scope_list(self):
        """ DATA IDENTIFIERS (CLIENT): Add, aggregate, and list data identifiers in a scope """

        # create some dummy data
        self.tmp_accounts = ['jdoe' for _ in range(3)]
        self.tmp_scopes = [scope_name_generator() for _ in range(3)]
        self.tmp_rses = [rse_name_generator() for _ in range(3)]
        self.tmp_files = ['file_%s' % generate_uuid() for _ in range(3)]
        self.tmp_datasets = ['dataset_%s' % generate_uuid() for _ in range(3)]
        self.tmp_containers = ['container_%s' % generate_uuid() for _ in range(3)]

        # add dummy data to the catalogue
        for i in range(3):
            self.scope_client.add_scope(self.tmp_accounts[i], self.tmp_scopes[i])
            self.rse_client.add_rse(self.tmp_rses[i])
            self.replica_client.add_replica(self.tmp_rses[i], self.tmp_scopes[i], self.tmp_files[i], 1, '0cc737eb')

        # put files in datasets
        for i in range(3):
            for j in range(3):
                files = [{'scope': self.tmp_scopes[j], 'name': self.tmp_files[j], 'bytes': 1, 'adler32': '0cc737eb'}]
                self.did_client.add_dataset(self.tmp_scopes[i], self.tmp_datasets[j])
                self.did_client.add_files_to_dataset(self.tmp_scopes[i], self.tmp_datasets[j], files)
Exemplo n.º 24
0
    if args.node_fts_servers:
        servers = PhEDEx_node_FTS_servers(args.node_fts_servers)
        print "FTS servers used by " + args.node_fts_servers + ' PhEDEx node:'
        for s in servers:
            print s
        sys.exit()

    if args.node_protocols:
        PhEDEx_node_protocols(args.node_protocols)
        sys.exit()

    if args.test_auth:
        whoami(account=args.account)

    # create re-usable RSE client connection:
    rse_client = RSEClient(account=args.account, auth_type='x509_proxy')

    if args.list_rses:
        list_rses()

    if args.get_rse_distance:
        (s, d) = args.get_rse_distance
        pprint.pprint(get_rse_distance(s, d))
        sys.exit()

    if args.link_attributes:
        (s, d) = args.link_attributes
        pprint.pprint(PhEDEx_link_attributes(s, d))
        sys.exit()

    # Handle RSE additions and configuration update
Exemplo n.º 25
0
from rucio.client.rseclient import RSEClient
import uuid

c = RSEClient(account="ivm")
for rse in c.list_rses():
    u = uuid.UUID(rse["id"])
    print rse["rse"], u.hex
Exemplo n.º 26
0
    def test_subscriptions_at_different_vos(self):
        """ MULTI VO (CLIENT): Test that subscriptions from 2nd vo don't interfere """

        account_client = AccountClient()
        usr_uuid = str(generate_uuid()).lower()[:16]
        shr_acc = 'shr-%s' % usr_uuid
        account_client.add_account(shr_acc, 'USER', '*****@*****.**')
        add_account(shr_acc, 'USER', '*****@*****.**', 'root', **self.new_vo)

        scope_client = ScopeClient()
        scope_uuid = str(generate_uuid()).lower()[:16]
        tst_scope = 'tst_%s' % scope_uuid
        new_scope = 'new_%s' % scope_uuid
        scope_client.add_scope('root', tst_scope)
        add_scope(new_scope, 'root', 'root', **self.new_vo)

        did_client = DIDClient()
        did_uuid = str(generate_uuid()).lower()[:16]
        tst_did = 'tstset_%s' % did_uuid
        new_did = 'newset_%s' % did_uuid

        rse_client = RSEClient()
        rse_str = ''.join(choice(ascii_uppercase) for x in range(10))
        tst_rse1 = 'TST1_%s' % rse_str
        tst_rse2 = 'TST2_%s' % rse_str
        new_rse1 = 'NEW1_%s' % rse_str
        new_rse2 = 'NEW2_%s' % rse_str
        rse_client.add_rse(tst_rse1)
        rse_client.add_rse(tst_rse2)
        add_rse(new_rse1, 'root', **self.new_vo)
        add_rse(new_rse2, 'root', **self.new_vo)

        acc_lim_client = AccountLimitClient()
        acc_lim_client.set_local_account_limit(shr_acc, tst_rse1, 10)
        acc_lim_client.set_local_account_limit(shr_acc, tst_rse2, 10)
        set_local_account_limit(shr_acc, new_rse1, 10, 'root', **self.new_vo)
        set_local_account_limit(shr_acc, new_rse2, 10, 'root', **self.new_vo)

        did_client.add_did(tst_scope, tst_did, 'DATASET', rse=tst_rse1)
        add_did(new_scope, new_did, 'DATASET', 'root', rse=new_rse1, **self.new_vo)

        sub_client = SubscriptionClient()
        sub_str = generate_uuid()
        tst_sub = 'tstsub_' + sub_str
        new_sub = 'newsub_' + sub_str
        shr_sub = 'shrsub_' + sub_str

        tst_sub_id = sub_client.add_subscription(tst_sub, shr_acc, {'scope': [tst_scope]},
                                                 [{'copies': 1, 'rse_expression': tst_rse2, 'weight': 0,
                                                   'activity': 'User Subscriptions'}],
                                                 '', None, 0, 0)
        shr_tst_sub_id = sub_client.add_subscription(shr_sub, shr_acc, {'scope': [tst_scope]},
                                                     [{'copies': 1, 'rse_expression': tst_rse2, 'weight': 0,
                                                       'activity': 'User Subscriptions'}],
                                                     '', None, 0, 0)

        new_sub_id = add_subscription(new_sub, shr_acc, {'scope': [new_scope]},
                                      [{'copies': 1, 'rse_expression': new_rse2, 'weight': 0, 'activity': 'User Subscriptions'}],
                                      '', False, 0, 0, 3, 'root', **self.new_vo)
        shr_new_sub_id = add_subscription(shr_sub, shr_acc, {'scope': [new_scope]},
                                          [{'copies': 1, 'rse_expression': new_rse2, 'weight': 0, 'activity': 'User Subscriptions'}],
                                          '', False, 0, 0, 3, 'root', **self.new_vo)

        tst_subs = [s['id'] for s in sub_client.list_subscriptions()]
        assert_in(tst_sub_id, tst_subs)
        assert_in(shr_tst_sub_id, tst_subs)
        assert_not_in(new_sub_id, tst_subs)
        assert_not_in(shr_new_sub_id, tst_subs)

        new_subs = [s['id'] for s in list_subscriptions(**self.new_vo)]
        assert_in(new_sub_id, new_subs)
        assert_in(shr_new_sub_id, new_subs)
        assert_not_in(tst_sub_id, new_subs)
        assert_not_in(shr_tst_sub_id, new_subs)

        shr_tst_subs = [s['id'] for s in sub_client.list_subscriptions(name=shr_sub)]
        assert_in(shr_tst_sub_id, shr_tst_subs)
        assert_not_in(shr_new_sub_id, shr_tst_subs)

        shr_new_subs = [s['id'] for s in list_subscriptions(name=shr_sub, **self.new_vo)]
        assert_in(shr_new_sub_id, shr_new_subs)
        assert_not_in(shr_tst_sub_id, shr_new_subs)

        acc_tst_subs = [s['id'] for s in sub_client.list_subscriptions(account=shr_acc)]
        assert_in(tst_sub_id, acc_tst_subs)
        assert_in(shr_tst_sub_id, acc_tst_subs)
        assert_not_in(new_sub_id, acc_tst_subs)
        assert_not_in(shr_new_sub_id, acc_tst_subs)

        acc_new_subs = [s['id'] for s in list_subscriptions(account=shr_acc, **self.new_vo)]
        assert_in(new_sub_id, acc_new_subs)
        assert_in(shr_new_sub_id, acc_new_subs)
        assert_not_in(tst_sub_id, acc_new_subs)
        assert_not_in(shr_tst_sub_id, acc_new_subs)
Exemplo n.º 27
0
#! /usr/bin/env python3

from rucio.client.rseclient import RSEClient
from rucio.common.exception import Duplicate, RSEProtocolNotSupported

rseclient = RSEClient()
rse_name = 'T3_US_Theta'
rse_properties = {
    'ASN': 'ASN',
    'availability': 7,
    'deterministic': True,
    'volatile': False,
    'city': 'Chicago',
    'region_code': 'IL',
    'country_name': 'US',
    'continent': 'NA',
    'time_zone': 'America/Chicago',
    'ISP': None,
    'staging_area': False,
    'rse_type': 'DISK',
    'longitude': -87.63,  # Chicago is at roughly 41.88 N, -87.63 W
    'latitude': 41.88
}
try:
    r = rseclient.add_rse(rse_name, **rse_properties)  # r is true on success
    print('Added Theta %s' % r)
except Duplicate:
    print('Theta existed')

prefix = '/lus/theta-fs0/projects/HighLumin/uscms/'  # Be sure to use a relative path for your endpoint
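
Scripts like this usually go on to register a protocol for the new RSE so that Rucio can build PFNs under the prefix defined above. The block below is a hedged sketch rather than a continuation of the original script: the gsiftp scheme, hostname, port and domain priorities are placeholder assumptions, and it presumes the RSEClient.add_protocol(rse, params) call shape.

protocol = {
    'scheme': 'gsiftp',                 # placeholder transfer scheme
    'hostname': 'gridftp.example.org',  # placeholder storage endpoint
    'port': 2811,
    'impl': 'rucio.rse.protocols.gfal.Default',
    'prefix': prefix,                   # path prefix defined above
    'domains': {
        'lan': {'read': 1, 'write': 1, 'delete': 1},
        'wan': {'read': 1, 'write': 1, 'delete': 1, 'third_party_copy': 1}}}
try:
    rseclient.add_protocol(rse_name, protocol)
    print('Registered protocol on %s' % rse_name)
except Duplicate:
    print('Protocol already registered')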
Exemplo n.º 28
0
def rse_client():
    return RSEClient()
Exemplo n.º 29
0
def get_rse_client(rse, **kwarg):
    '''
    get_rse_client
    '''
    from rucio.client.rseclient import RSEClient
    return RSEClient().get_rse(rse)
Exemplo n.º 30
0
def rse_client():
    from rucio.client.rseclient import RSEClient

    return RSEClient()
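
Bare factories like the two above typically come from a pytest conftest.py, where a decorator (not shown in these excerpts) turns them into fixtures that tests request by name. A minimal sketch, assuming pytest is the runner; the test function and its assertion are purely illustrative:

import pytest
from rucio.client.rseclient import RSEClient

@pytest.fixture
def rse_client():
    # A fresh client for every test that requests the 'rse_client' fixture
    return RSEClient()

def test_client_exposes_list_rses(rse_client):
    # Illustrative check only, to keep the sketch self-contained
    assert callable(rse_client.list_rses)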
Exemplo n.º 31
0
class TestBoolean(unittest.TestCase):
    def setUp(self):
        self.account_client = AccountClient()
        self.rse_client = RSEClient()

        self.account = generate_uuid()[:10]
        self.rse = rse_name_generator()

        self.account_client.add_account(self.account, 'SERVICE',
                                        '*****@*****.**')
        self.rse_client.add_rse(self.rse)

    def tearDown(self):
        self.account_client.delete_account(self.account)
        self.rse_client.delete_rse(self.rse)

    def test_booleanstring_account_attribute(self):
        self.account_client.add_account_attribute(self.account,
                                                  'teststringtrue', 'true')
        self.account_client.add_account_attribute(self.account, 'testinttrue',
                                                  '1')

        self.account_client.add_account_attribute(self.account,
                                                  'teststringfalse', 'false')
        self.account_client.add_account_attribute(self.account, 'testintfalse',
                                                  '0')

        result = {}
        for account in self.account_client.list_account_attributes(
                self.account):
            for res in account:
                result[res['key']] = res['value']

        assert result['teststringtrue'] is True
        assert result['testinttrue'] == '1'
        assert result['teststringfalse'] is False
        assert result['testintfalse'] == '0'

    def test_booleanstring_rse_attribute(self):
        self.rse_client.add_rse_attribute(self.rse, 'teststringtrue', 'true')
        self.rse_client.add_rse_attribute(self.rse, 'testinttrue', '1')

        self.rse_client.add_rse_attribute(self.rse, 'teststringfalse', 'false')
        self.rse_client.add_rse_attribute(self.rse, 'testintfalse', '0')

        result = self.rse_client.list_rse_attributes(self.rse)

        assert result['teststringtrue'] is True
        assert result['testinttrue'] == '1'
        assert result['teststringfalse'] is False
        assert result['testintfalse'] == '0'
Exemplo n.º 32
0
            print(s)
        sys.exit()

    if args.node_protocols:
        PhEDEx_node_protocols(args.node_protocols)
        sys.exit()

    if args.test_auth:
        whoami(account=args.account)

    # create re-usable RSE client connection:
    if args.password and args.username:
        creds = {'username': args.username, 'password': args.password}
        rse_client = RSEClient(account=args.account,
                               auth_type='userpass',
                               creds=creds,
                               rucio_host=args.host,
                               auth_host=args.auth_host)
    else:
        rse_client = RSEClient(account=args.account, auth_type='x509_proxy')

    if args.list_rses:
        list_rses()

    if args.get_rse_distance:
        (s, d) = args.get_rse_distance
        pprint.pprint(get_rse_distance(s, d))
        sys.exit()

    if args.link_attributes:
        (s, d) = args.link_attributes
Exemplo n.º 33
0
class TestQoS(object):
    @classmethod
    def setupClass(self):
        if config_get_bool('common',
                           'multi_vo',
                           raise_exception=False,
                           default=False):
            self.vo = {
                'vo':
                config_get('client',
                           'vo',
                           raise_exception=False,
                           default='tst')
            }
        else:
            self.vo = {}

        self.rse_client = RSEClient()
        self.tmp_rse_name = rse_name_generator()
        self.rse_client.add_rse(self.tmp_rse_name, vo=self.vo)
        self.tmp_rse = self.rse_client.get_rse(self.tmp_rse_name)['id']

    @classmethod
    def teardownClass(self):
        self.rse_client.delete_rse(self.tmp_rse_name)

    def test_update_and_remove_rse_qos_class(self):
        """ QoS (CORE): Update and remove QoS class for RSE """

        update_rse(self.tmp_rse, {'qos_class': 'fast_and_expensive'})
        rse = get_rse(self.tmp_rse)
        assert_equal(rse['qos_class'], 'fast_and_expensive')

        update_rse(self.tmp_rse, {'qos_class': 'slow_but_cheap'})
        rse = get_rse(self.tmp_rse)
        assert_equal(rse['qos_class'], 'slow_but_cheap')

        update_rse(self.tmp_rse, {'qos_class': None})
        rse = get_rse(self.tmp_rse)
        assert_equal(rse['qos_class'], None)

    def test_update_and_remove_rse_qos_class_client(self):
        """ QoS (CLIENT): Update and remove QoS class for RSE """

        self.rse_client.update_rse(self.tmp_rse_name,
                                   {'qos_class': 'fast_and_expensive'})
        rse = self.rse_client.get_rse(self.tmp_rse_name)
        assert_equal(rse['qos_class'], 'fast_and_expensive')

        self.rse_client.update_rse(self.tmp_rse_name,
                                   {'qos_class': 'slow_but_cheap'})
        rse = self.rse_client.get_rse(self.tmp_rse_name)
        assert_equal(rse['qos_class'], 'slow_but_cheap')

        self.rse_client.update_rse(self.tmp_rse_name, {'qos_class': None})
        rse = self.rse_client.get_rse(self.tmp_rse_name)
        assert_equal(rse['qos_class'], None)

    def test_qos_policies(self):
        """ QoS (CLIENT): Add QoS policy for RSE """

        self.rse_client.add_qos_policy(self.tmp_rse_name, 'FOO')
        policies = self.rse_client.list_qos_policies(self.tmp_rse_name)
        assert_equal(policies, ['FOO'])

        self.rse_client.add_qos_policy(self.tmp_rse_name, 'BAR')
        policies = sorted(self.rse_client.list_qos_policies(self.tmp_rse_name))
        assert_equal(policies, ['BAR', 'FOO'])

        self.rse_client.delete_qos_policy(self.tmp_rse_name, 'BAR')
        policies = self.rse_client.list_qos_policies(self.tmp_rse_name)
        assert_equal(policies, ['FOO'])

        self.rse_client.delete_qos_policy(self.tmp_rse_name, 'FOO')
        policies = self.rse_client.list_qos_policies(self.tmp_rse_name)
        assert_equal(policies, [])