# Fragment of a Rucio conveyor daemon: imports, module-level state, and the
# (truncated) start of run_once(), which drains finished transfer requests.
from rucio.core.config import items
from rucio.core.monitor import record_timer, record_counter
from rucio.core.rse import list_rses
from rucio.daemons.conveyor.common import run_conveyor_daemon
from rucio.db.sqla.constants import RequestState, RequestType, ReplicaState, BadFilesStatus
from rucio.db.sqla.session import transactional_session
from rucio.rse import rsemanager

# Python 2/3 compatibility shim for urlparse.
try:
    from urlparse import urlparse  # py2
except ImportError:
    from urllib.parse import urlparse  # py3

# Event set to ask all worker threads to stop.
# NOTE(review): `threading` is not imported in this view — presumably imported
# earlier in the full file; confirm.
graceful_stop = threading.Event()
# Memcached-backed dogpile cache region; entries expire after 900 s.
region = make_region_memcached(expiration_time=900)


def run_once(bulk, db_bulk, suspicious_patterns, retry_protocol_mismatches, heartbeat_handler, activity):
    # NOTE(review): this definition is truncated in the visible source — only
    # the prefix of the body appears below. Do not treat it as complete.
    #
    # Renews the heartbeat, then fetches the next batch of transfer requests
    # that have reached a terminal state (DONE/FAILED/LOST/...).
    worker_number, total_workers, logger = heartbeat_handler.live()
    try:
        logger(logging.DEBUG, 'Working on activity %s', activity)
        time1 = time.time()
        reqs = request_core.get_next(
            request_type=[
                RequestType.TRANSFER,
                RequestType.STAGEIN,
                RequestType.STAGEOUT
            ],
            state=[
                RequestState.DONE,
                RequestState.FAILED,
                RequestState.LOST,
# Fragment of the Rucio reaper daemon: imports, module-level metrics/state,
# and the (truncated) start of get_rses_to_process().
from rucio.core.credential import get_signed_url
from rucio.core.heartbeat import live, die, sanity_check, list_payload_counts
from rucio.core.message import add_message
from rucio.core.replica import list_and_mark_unlocked_replicas, delete_replicas
from rucio.core.rse import list_rses, get_rse_limits, get_rse_usage, list_rse_attributes, get_rse_protocols
from rucio.core.rse_expression_parser import parse_expression
from rucio.core.rule import get_evaluation_backlog
from rucio.core.vo import list_vos
from rucio.rse import rsemanager as rsemgr

if TYPE_CHECKING:
    from typing import Callable, Tuple

# Event set to ask all worker threads to stop.
# NOTE(review): `threading`, `make_region_memcached`, `monitor` and `Gauge`
# are not imported in this view — presumably earlier in the full file; confirm.
GRACEFUL_STOP = threading.Event()
# Memcached-backed dogpile cache region; entries expire after 600 s.
REGION = make_region_memcached(expiration_time=600)

# Counter exported both to Prometheus and StatsD for deleted replicas.
DELETION_COUNTER = monitor.MultiCounter(prom='rucio_daemons_reaper_deletion_done',
                                        statsd='reaper.deletion.done',
                                        documentation='Number of deleted replicas')
# Prometheus gauge tracking RSEs temporarily excluded from deletion, per RSE.
EXCLUDED_RSE_GAUGE = Gauge('rucio_daemons_reaper_excluded_rses', 'Temporarly excluded RSEs', labelnames=('rse', ))


def get_rses_to_process(rses, include_rses, exclude_rses, vos):
    # NOTE(review): this definition is truncated in the visible source — only
    # the docstring prefix appears below.
    """
    Return the list of RSEs to process based on rses, include_rses and exclude_rses

    :param rses: List of RSEs the reaper should work against. If empty, it considers all RSEs.
# Fragment of Rucio conveyor transfer-limit handling: imports, module-level
# configuration, and the (truncated) start of get_transfer_limits().
from rucio.common.cache import make_region_memcached
from rucio.common.config import config_get, config_get_bool, config_get_int
from rucio.core import config as config_core
from rucio.core.rse import get_rse_id, get_rse_transfer_limits

# Queue mode read from the [conveyor] config section; any case-variant of
# "strict" is normalised to the lowercase literal 'strict'.
queue_mode = config_get('conveyor', 'queue_mode', False, 'default')
if queue_mode.upper() == 'STRICT':
    queue_mode = 'strict'

# Config value arrives as a string; fold it into a boolean flag.
config_memcache = config_get('conveyor', 'using_memcache', False, 'False')
if config_memcache.upper() == 'TRUE':
    using_memcache = True
else:
    using_memcache = False

# Short-lived cache region; TTL taken from [conveyor] cache_time (default 600 s).
REGION_SHORT = make_region_memcached(expiration_time=config_get_int('conveyor', 'cache_time', False, 600))


def get_transfer_limits(activity, rse_id, logger=logging.log):
    # NOTE(review): this definition is truncated in the visible source — only
    # the prefix of the body appears below. `logging` and `get_config_limit`
    # are not defined in this view; presumably provided earlier in the file.
    """
    Get RSE transfer limits.

    :param activity: The activity.
    :param rse_id: The RSE id.
    :param logger: Optional decorated logger that can be passed from the calling daemons or servers.

    :returns: max_transfers if exists else None.
    """
    try:
        if queue_mode == 'strict':
            threshold = get_config_limit(activity, rse_id)
# Fragment of Rucio authentication-token handling: imports, the dogpile cache
# key generator, the token cache region, and the (truncated) start of
# get_auth_token_user_pass().
from rucio.db.sqla.session import read_session, transactional_session


def token_key_generator(namespace, fni, **kwargs):
    """
    Build a dogpile.cache key generator that uses the token value itself as
    the cache key (the `session` argument is deliberately ignored so that the
    same token maps to the same cache entry regardless of DB session).

    :returns: generate key function
    """
    def generate_key(token, session=None):
        """ :returns: token """
        return token
    return generate_key


# Pick the token cache backend from configuration: an external memcached
# region when enabled, otherwise an in-process dogpile memory cache.
# Both use a 900 s TTL and the token-as-key generator above.
# NOTE(review): `config_get_bool`, `make_region_memcached` and `make_region`
# are not imported in this view — presumably earlier in the full file; confirm.
if config_get_bool('cache', 'use_external_cache_for_auth_tokens', default=False):
    TOKENREGION = make_region_memcached(expiration_time=900, function_key_generator=token_key_generator)
else:
    TOKENREGION = make_region(function_key_generator=token_key_generator).configure('dogpile.cache.memory', expiration_time=900)


@transactional_session
def get_auth_token_user_pass(account, username, password, appid, ip=None, session=None):
    # NOTE(review): this definition is truncated in the visible source — only
    # the docstring prefix appears below.
    """
    Authenticate a Rucio account temporarily via username and password.
# Fragment of Rucio replica-sorter/geoip support: imports, cache region, and
# the (truncated) start of the private GeoIP database downloader.
from urllib.parse import urlparse

import geoip2.database
import requests
from dogpile.cache.api import NO_VALUE

from rucio.common import utils
from rucio.common.cache import make_region_memcached
from rucio.common.config import config_get, config_get_bool
from rucio.common.exception import InvalidRSEExpression
from rucio.core.rse_expression_parser import parse_expression

if TYPE_CHECKING:
    from typing import Dict, List, Optional

# Memcached-backed dogpile cache region (1800 s TTL) using the project's
# custom key generator.
REGION = make_region_memcached(expiration_time=1800, function_key_generator=utils.my_key_generator)


def __download_geoip_db(directory, filename):
    # NOTE(review): this definition is truncated in the visible source — only
    # the prefix of the body appears below (the last call is cut mid-arguments).
    #
    # Resolve where to fetch the GeoIP database from: an explicit download URL
    # from config takes precedence; otherwise fall back to a licence key.
    download_url = config_get('core', 'geoip_download_url', raise_exception=False, default=None)
    verify_tls = config_get_bool('core', 'geoip_download_verify_tls', raise_exception=False, default=True)
    if not download_url:
        licence_key = config_get('core', 'geoip_licence_key', raise_exception=False,
# Fragment of the Rucio necromancer daemon: imports, module-level state, and
# the (truncated) start of the necromancer() worker.
import rucio.db.sqla.util
from rucio.common import exception
from rucio.common.cache import make_region_memcached
from rucio.common.config import config_get, config_get_int
from rucio.common.exception import DatabaseException
from rucio.common.logging import formatted_logger, setup_logging
from rucio.common.utils import daemon_sleep
from rucio.core import monitor, heartbeat
from rucio.core.replica import list_bad_replicas, get_replicas_state, get_bad_replicas_backlog
from rucio.core.rule import (update_rules_for_lost_replica, update_rules_for_bad_replica, get_evaluation_backlog)
from rucio.db.sqla.constants import ReplicaState

# Event set to ask all worker threads to stop.
# NOTE(review): `threading` is not imported in this view — presumably earlier
# in the full file; confirm.
GRACEFUL_STOP = threading.Event()
# Cache region with TTL from [necromancer] cache_time (default 600 s).
REGION = make_region_memcached(expiration_time=config_get_int('necromancer', 'cache_time', False, 600))


def necromancer(thread=0, bulk=5, once=False, sleep_time=60):
    # NOTE(review): this definition is truncated in the visible source — only
    # the prefix of the body appears below. `socket` is not imported in this
    # view; presumably imported earlier in the file.
    """
    Creates a Necromancer Worker that gets a list of bad replicas for a given hash,
    identify lost DIDs and for non-lost ones, set the locks and rules for reevaluation.

    :param thread: Thread number at startup.
    :param bulk: The number of requests to process.
    :param once: Run only once.
    :param sleep_time: Thread sleep time after each chunk of work.
    """
    executable = 'necromancer'
    hostname = socket.getfqdn()