Code example #1
File: enum.py  Project: monisjaved/rucio
    def process_bind_param(self, value, dialect):
        # Convert an enum member to its stored value; None is passed through unchanged.
        try:
            if value is None:
                return None
            return value.value
        except AttributeError:
            raise InvalidType('Invalid value/type %s for %s' % (value, self.enum))
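The method above matches the bind-parameter hook of a SQLAlchemy TypeDecorator that persists enum members by their underlying value. Below is a minimal sketch of how such a hook is typically wired up, assuming a String-backed decorator; the EnumType and DIDType names and the stand-in InvalidType exception are illustrative, not taken from the project.

import enum

from sqlalchemy.types import String, TypeDecorator


class InvalidType(Exception):
    """Stand-in for the project's InvalidType exception."""


class EnumType(TypeDecorator):
    """Illustrative decorator: persist enum members by their .value."""
    impl = String

    def __init__(self, enum_class, *args, **kwargs):
        super(EnumType, self).__init__(*args, **kwargs)
        self.enum = enum_class

    def process_bind_param(self, value, dialect):
        try:
            if value is None:
                return None
            return value.value
        except AttributeError:
            raise InvalidType('Invalid value/type %s for %s' % (value, self.enum))


class DIDType(enum.Enum):
    FILE = 'F'
    DATASET = 'D'


# EnumType(DIDType).process_bind_param(DIDType.FILE, dialect=None) returns 'F';
# a value without a .value attribute raises InvalidType.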
Code example #2
File: utils.py  Project: TomasJavurek/rucio
import datetime

from six import string_types

# InvalidType is assumed to come from the project's common exception module.
from rucio.common.exception import InvalidType


def parse_did_filter_from_string(input_string):
    """
    Parse DID filter options in the format 'length<3,type=all' from a string.

    :param input_string: String containing the filter options.
    :return: filter dictionary and type as string.
    """
    filters = {}
    type = 'collection'
    if input_string:
        filter_options = input_string.replace(' ', '').split(',')
        for option in filter_options:
            value = None
            key = None

            if '>=' in option:
                key, value = option.split('>=')
                if key == 'length':
                    key = 'length.gte'
            elif '>' in option:
                key, value = option.split('>')
                if key == 'length':
                    key = 'length.gt'
            elif '<=' in option:
                key, value = option.split('<=')
                if key == 'length':
                    key = 'length.lte'
            elif '<' in option:
                key, value = option.split('<')
                if key == 'length':
                    key = 'length.lt'
            elif '=' in option:
                key, value = option.split('=')
                if key == 'created_after' or key == 'created_before':
                    value = datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%fZ')

            if key == 'type':
                if value.upper() in ['ALL', 'COLLECTION', 'CONTAINER', 'DATASET', 'FILE']:
                    type = value.lower()
                else:
                    raise InvalidType('{0} is not a valid type. Valid types are {1}'.format(value, ['ALL', 'COLLECTION', 'CONTAINER', 'DATASET', 'FILE']))
            elif key in ('length.gt', 'length.lt', 'length.gte', 'length.lte', 'length'):
                try:
                    value = int(value)
                except ValueError:
                    raise ValueError('Length has to be an integer value.')
                filters[key] = value
            elif isinstance(value, string_types):
                if value.lower() == 'true':
                    value = '1'
                elif value.lower() == 'false':
                    value = '0'
                filters[key] = value
            else:
                filters[key] = value

    return filters, type
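A short usage sketch of the parser above, assuming it is importable from the snippet's module; the filter string is illustrative.

# Usage sketch: options are split on ',', comparison operators on 'length' map to
# length.gt / length.gte / length.lt / length.lte, created_after/created_before are
# converted to datetime objects, and 'type' is returned separately.
filters, did_type = parse_did_filter_from_string('type=dataset,length>=3,created_after=2021-01-01T00:00:00.000Z')
# filters  == {'length.gte': 3, 'created_after': datetime.datetime(2021, 1, 1, 0, 0)}
# did_type == 'dataset'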
Code example #3
    def process_bind_param(self, value, dialect):
        # Only InternalScope-like objects may be bound; plain strings are rejected
        # and the scope's internal representation is stored in the database.
        if value is None:
            return value

        if isinstance(value, string_types):
            raise InvalidType(
                'Cannot insert to db. Expected InternalScope, got string type.'
            )
        return value.internal
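This variant refuses plain strings and persists the bound object's internal representation instead. Below is a minimal sketch of the contract it relies on, using an illustrative stand-in class rather than the project's real InternalScope.

class FakeInternalScope:
    """Illustrative stand-in: the bound object only needs an .internal attribute."""

    def __init__(self, internal):
        self.internal = internal


# process_bind_param(FakeInternalScope('user.jdoe'), dialect=None) stores 'user.jdoe',
# whereas process_bind_param('user.jdoe', dialect=None) raises InvalidType because
# bare strings are rejected.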
Code example #4
def add_files(lfns, account, ignore_availability, session=None):
    """
    Bulk add files :
    - Create the file and replica.
    - If doesn't exist create the dataset containing the file as well as a rule on the dataset on ANY sites.
    - Create all the ascendants of the dataset if they do not exist

    :param lfns: List of lfn (dictionary {'lfn': <lfn>, 'rse': <rse>, 'bytes': <bytes>, 'adler32': <adler32>, 'guid': <guid>, 'pfn': <pfn>}
    :param issuer: The issuer account.
    :param ignore_availability: A boolean to ignore blocklisted sites.
    :session: The session used
    """
    attachments = []
    # The list of scopes is necessary for extract_scope
    scopes = list_scopes(session=session)
    scopes = [scope.external for scope in scopes]
    exist_lfn = []
    for lfn in lfns:
        # First check if the file exists
        filename = lfn['lfn']
        lfn_scope, _ = extract_scope(filename, scopes)
        lfn_scope = InternalScope(lfn_scope)

        exists, did_type = _exists(lfn_scope, filename)
        if exists:
            continue

        # Get all the ascendants of the file
        lfn_split = filename.split('/')
        lpns = ["/".join(lfn_split[:idx]) for idx in range(2, len(lfn_split))]
        lpns.reverse()
        print(lpns)

        # The parent must be a dataset. Register it as well as the rule
        dsn_name = lpns[0]
        dsn_scope, _ = extract_scope(dsn_name, scopes)
        dsn_scope = InternalScope(dsn_scope)
        exists, did_type = _exists(dsn_scope, dsn_name)
        if exists and did_type == DIDType.CONTAINER:
            raise UnsupportedOperation('Cannot create %s as dataset' %
                                       dsn_name)
        if (dsn_name not in exist_lfn) and not exists:
            print('Will create %s' % dsn_name)
            add_did(scope=dsn_scope,
                    name=dsn_name,
                    type=DIDType.DATASET,
                    account=InternalAccount(account),
                    statuses=None,
                    meta=None,
                    rules=[{
                        'copies': 1,
                        'rse_expression': 'ANY=true',
                        'weight': None,
                        'account': InternalAccount(account),
                        'lifetime': None,
                        'grouping': 'NONE'
                    }],
                    lifetime=None,
                    dids=None,
                    rse_id=None,
                    session=session)
            exist_lfn.append(dsn_name)
            parent_name = lpns[1]
            parent_scope, _ = extract_scope(parent_name, scopes)
            parent_scope = InternalScope(parent_scope)
            attachments.append({
                'scope': parent_scope,
                'name': parent_name,
                'dids': [{
                    'scope': dsn_scope,
                    'name': dsn_name
                }]
            })

        # Register the file
        rse_id = lfn.get('rse_id', None)
        if not rse_id:
            raise InvalidType('Missing rse_id')
        bytes_ = lfn.get('bytes', None)
        guid = lfn.get('guid', None)
        adler32 = lfn.get('adler32', None)
        pfn = lfn.get('pfn', None)
        files = {
            'scope': lfn_scope,
            'name': filename,
            'bytes': bytes_,
            'adler32': adler32
        }
        if pfn:
            files['pfn'] = str(pfn)
        if guid:
            files['meta'] = {'guid': guid}
        add_replicas(rse_id=rse_id,
                     files=[files],
                     dataset_meta=None,
                     account=InternalAccount(account),
                     ignore_availability=ignore_availability,
                     session=session)
        add_rule(dids=[{
            'scope': lfn_scope,
            'name': filename
        }],
                 account=InternalAccount(account),
                 copies=1,
                 rse_expression=lfn['rse'],
                 grouping=None,
                 weight=None,
                 lifetime=86400,
                 locked=None,
                 subscription_id=None,
                 session=session)
        attachments.append({
            'scope': dsn_scope,
            'name': dsn_name,
            'dids': [{
                'scope': lfn_scope,
                'name': filename
            }]
        })

        # Now loop over the ascendants of the dataset and create them
        for lpn in lpns[1:]:
            child_scope, _ = extract_scope(lpn, scopes)
            child_scope = InternalScope(child_scope)
            exists, did_type = _exists(child_scope, lpn)
            if exists and did_type == DIDType.DATASET:
                raise UnsupportedOperation('Cannot create %s as container' %
                                           lpn)
            if (lpn not in exist_lfn) and not exists:
                print('Will create %s' % lpn)
                add_did(scope=child_scope,
                        name=lpn,
                        type=DIDType.CONTAINER,
                        account=InternalAccount(account),
                        statuses=None,
                        meta=None,
                        rules=None,
                        lifetime=None,
                        dids=None,
                        rse_id=None,
                        session=session)
                exist_lfn.append(lpn)
                parent_name = lpns[lpns.index(lpn) + 1]
                parent_scope, _ = extract_scope(parent_name, scopes)
                parent_scope = InternalScope(parent_scope)
                attachments.append({
                    'scope': parent_scope,
                    'name': parent_name,
                    'dids': [{
                        'scope': child_scope,
                        'name': lpn
                    }]
                })
    # Finally attach everything
    attach_dids_to_dids(attachments,
                        account=InternalAccount(account),
                        ignore_duplicate=True,
                        session=session)
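A call sketch for add_files with placeholder values; the LFN path, RSE name, checksum, rse_id and account name below are made up, and the database session is assumed to come from the caller.

# Call sketch with placeholder values. Every entry needs an 'rse_id' (the function
# raises InvalidType otherwise); 'pfn' and 'guid' are optional and skipped when empty.
lfns = [{
    'lfn': '/example.vo/prod/run001/file_000001.root',  # placeholder LFN
    'rse': 'SITE-A_DISK',                               # placeholder RSE expression
    'rse_id': '11111111111111111111111111111111',       # placeholder RSE id
    'bytes': 1048576,
    'adler32': '0a1b2c3d',
    'guid': None,
    'pfn': None,
}]
add_files(lfns, account='dirac', ignore_availability=False, session=session)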
Code example #5
def add_files(lfns, account, ignore_availability, vo='def', session=None):
    """
    Bulk add files :
    - Create the file and replica.
    - If doesn't exist create the dataset containing the file as well as a rule on the dataset on ANY sites.
    - Create all the ascendants of the dataset if they do not exist

    :param lfns: List of lfn (dictionary {'lfn': <lfn>, 'rse': <rse>, 'bytes': <bytes>, 'adler32': <adler32>, 'guid': <guid>, 'pfn': <pfn>}
    :param issuer: The issuer account.
    :param ignore_availability: A boolean to ignore blocklisted sites.
    :param vo: The VO to act on
    :param session: The session used
    """
    rule_extension_list = []
    attachments = []
    # The list of scopes is necessary for extract_scope
    filter_ = {'scope': InternalScope(scope='*', vo=vo)}
    scopes = list_scopes(filter_=filter_, session=session)
    scopes = [scope.external for scope in scopes]
    exist_lfn = []
    try:
        lifetime_dict = config_get(section='dirac', option='lifetime', session=session)
        lifetime_dict = loads(lifetime_dict)
    except ConfigNotFound:
        lifetime_dict = {}
    except JSONDecodeError as err:
        raise InvalidType('Problem parsing lifetime option in dirac section : %s' % str(err))
    except Exception as err:
        raise RucioException(str(err))

    for lfn in lfns:
        # First check if the file exists
        filename = lfn['lfn']
        lfn_scope, _ = extract_scope(filename, scopes)
        lfn_scope = InternalScope(lfn_scope, vo=vo)

        exists, did_type = _exists(lfn_scope, filename)
        if exists:
            continue

        # Get all the ascendants of the file
        lfn_split = filename.split('/')
        lpns = ["/".join(lfn_split[:idx]) for idx in range(2, len(lfn_split))]
        lpns.reverse()
        print(lpns)

        # The parent must be a dataset. Register it as well as the rule
        dsn_name = lpns[0]
        dsn_scope, _ = extract_scope(dsn_name, scopes)
        dsn_scope = InternalScope(dsn_scope, vo=vo)

        # Compute lifetime
        lifetime = None
        if dsn_scope in lifetime_dict:
            lifetime = lifetime_dict[dsn_scope]
        else:
            for pattern in lifetime_dict:
                if re.match(pattern, dsn_scope):
                    lifetime = lifetime_dict[pattern]
                    break

        exists, did_type = _exists(dsn_scope, dsn_name)
        if exists and did_type == DIDType.CONTAINER:
            raise UnsupportedOperation('Cannot create %s as dataset' % dsn_name)
        if (dsn_name not in exist_lfn) and not exists:
            print('Will create %s' % dsn_name)
            # To maintain compatibility between the master and LTS-1.26 branches, the first 3 arguments are passed positionally
            add_did(dsn_scope,
                    dsn_name,
                    DIDType.DATASET,
                    account=InternalAccount(account, vo=vo),
                    statuses=None,
                    meta=None,
                    rules=[{'copies': 1, 'rse_expression': 'ANY=true', 'weight': None, 'account': InternalAccount(account, vo=vo), 'lifetime': None, 'grouping': 'NONE'}],
                    lifetime=None,
                    dids=None,
                    rse_id=None,
                    session=session)
            exist_lfn.append(dsn_name)
            parent_name = lpns[1]
            parent_scope, _ = extract_scope(parent_name, scopes)
            parent_scope = InternalScope(parent_scope, vo=vo)
            attachments.append({'scope': parent_scope, 'name': parent_name, 'dids': [{'scope': dsn_scope, 'name': dsn_name}]})
            rule_extension_list.append((dsn_scope, dsn_name))
        if lifetime and (dsn_scope, dsn_name) not in rule_extension_list:
            # Reset the lifetime of the rule to the configured value
            rule = [rul for rul in list_rules({'scope': dsn_scope, 'name': dsn_name, 'account': InternalAccount(account, vo=vo)}, session=session) if rul['rse_expression'] == 'ANY=true']
            if rule:
                update_rule(rule[0]['id'], options={'lifetime': lifetime}, session=session)
            rule_extension_list.append((dsn_scope, dsn_name))

        # Register the file
        rse_id = lfn.get('rse_id', None)
        if not rse_id:
            raise InvalidType('Missing rse_id')
        bytes_ = lfn.get('bytes', None)
        guid = lfn.get('guid', None)
        adler32 = lfn.get('adler32', None)
        pfn = lfn.get('pfn', None)
        files = {'scope': lfn_scope, 'name': filename, 'bytes': bytes_, 'adler32': adler32}
        if pfn:
            files['pfn'] = str(pfn)
        if guid:
            files['meta'] = {'guid': guid}
        add_replicas(rse_id=rse_id,
                     files=[files],
                     dataset_meta=None,
                     account=InternalAccount(account, vo=vo),
                     ignore_availability=ignore_availability,
                     session=session)
        add_rule(dids=[{'scope': lfn_scope, 'name': filename}],
                 account=InternalAccount(account, vo=vo),
                 copies=1,
                 rse_expression=lfn['rse'],
                 grouping=None,
                 weight=None,
                 lifetime=86400,
                 locked=None,
                 subscription_id=None,
                 session=session)
        attachments.append({'scope': dsn_scope, 'name': dsn_name, 'dids': [{'scope': lfn_scope, 'name': filename}]})

        # Now loop over the ascendants of the dataset and create them
        for lpn in lpns[1:]:
            child_scope, _ = extract_scope(lpn, scopes)
            child_scope = InternalScope(child_scope, vo=vo)
            exists, did_type = _exists(child_scope, lpn)
            if exists and did_type == DIDType.DATASET:
                raise UnsupportedOperation('Cannot create %s as container' % lpn)
            if (lpn not in exist_lfn) and not exists:
                print('Will create %s' % lpn)
                add_did(child_scope,
                        lpn,
                        DIDType.CONTAINER,
                        account=InternalAccount(account, vo=vo),
                        statuses=None,
                        meta=None,
                        rules=None,
                        lifetime=None,
                        dids=None,
                        rse_id=None,
                        session=session)
                exist_lfn.append(lpn)
                parent_name = lpns[lpns.index(lpn) + 1]
                parent_scope, _ = extract_scope(parent_name, scopes)
                parent_scope = InternalScope(parent_scope, vo=vo)
                attachments.append({'scope': parent_scope, 'name': parent_name, 'dids': [{'scope': child_scope, 'name': lpn}]})
    # Finally attach everything
    attach_dids_to_dids(attachments,
                        account=InternalAccount(account, vo=vo),
                        ignore_duplicate=True,
                        session=session)
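The lifetime handling near the top of this variant reads the 'lifetime' option from the [dirac] configuration section and JSON-decodes it into a dict mapping a scope, or a regular expression matched against the dataset scope, to a rule lifetime in seconds. Below is a sketch of what that decoded value could look like; the scopes and lifetimes are made up.

from json import loads

# The raw option value is a JSON object; keys are scopes or regular expressions
# matched with re.match against the dataset scope, values are lifetimes in seconds.
raw_value = '{"user.*": 2592000, "data": 31536000}'  # made-up scopes and lifetimes
lifetime_dict = loads(raw_value)
# lifetime_dict == {'user.*': 2592000, 'data': 31536000}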