class CurrentFileTransfers(WebModule):
    def __init__(self, config):
        WebModule.__init__(self, config)

        self.rlfsm = RLFSM()
        self.rlfsm.set_read_only(True)

    def run(self, caller, request, inventory):
        sql = 'SELECT q.`id`, q.`batch_id`, ss.`name`, sd.`name`, f.`name`, f.`size` FROM `transfer_tasks` AS q'
        sql += ' INNER JOIN `file_subscriptions` AS u ON u.`id` = q.`subscription_id`'
        sql += ' INNER JOIN `sites` AS ss ON ss.`id` = q.`source_id`'
        sql += ' INNER JOIN `sites` AS sd ON sd.`id` = u.`site_id`'
        sql += ' INNER JOIN `files` AS f ON f.`id` = u.`file_id`'
        sql += ' ORDER BY q.`id`'

        current_tasks = self.rlfsm.db.query(sql)

        batch_ids = set(s[1] for s in current_tasks)

        transfers = []
        for batch_id in batch_ids:
            transfers.extend(self.rlfsm.transfer_query.get_transfer_status(batch_id))

        transfers_map = dict((t[0], t[1:]) for t in transfers)

        data = []
        for task_id, batch_id, source, destination, lfn, size in current_tasks:
            try:
                transfer = transfers_map[task_id]
            except KeyError:
                status = 'unknown'
                start = ''
                finish = ''
            else:
                status = FileQuery.status_name(transfer[0])

                if transfer[2] is None:
                    start = ''
                else:
                    start = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(transfer[2]))

                if transfer[3] is None:
                    finish = ''
                else:
                    finish = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(transfer[3]))

            data.append({
                'id': task_id,
                'from': source,
                'to': destination,
                'lfn': lfn,
                'size': size,
                'status': status,
                'start': start,
                'finish': finish
            })

        return data
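
# ---------------------------------------------------------------------------
# Example (not part of the module): a minimal, standalone sketch of the record
# shape that CurrentFileTransfers.run() returns and of the UTC timestamp
# formatting it applies. All values below are hypothetical placeholders, not
# data from a real transfer task.

import json
import time

_sample_start = 1500000000  # hypothetical epoch seconds for a transfer start

_record = {
    'id': 42,                           # hypothetical task id
    'from': 'SITE_A',                   # hypothetical source site name
    'to': 'SITE_B',                     # hypothetical destination site name
    'lfn': '/store/example/file.root',  # hypothetical logical file name
    'size': 1024,
    'status': 'done',
    'start': time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(_sample_start)),
    'finish': ''                        # empty string when no timestamp is available
}

# The web layer would serialize a list of such records, e.g. as JSON.
print(json.dumps([_record], indent = 2))
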
class RLFSMPhEDExReserveCopyInterface(CopyInterface):
    """
    CopyInterface using the Dynamo RLFSM.
    """

    def __init__(self, config = None):
        CopyInterface.__init__(self, config)

        self.rlfsm = RLFSM(config.get('rlfsm', None))
        self.mysql = MySQL(config.reserve_db_params)

    def set_read_only(self, value = True): #override
        self._read_only = value
        self.rlfsm.set_read_only(value)

    def schedule_copies(self, replica_list, operation_id, comments = ''): #override
        sites = set(r.site for r in replica_list)
        if len(sites) != 1:
            raise OperationalError('schedule_copies should be called with a list of replicas at a single site.')

        LOG.info('Scheduling copy of %d replicas to %s using RLFSM (operation %d)', len(replica_list), list(sites)[0], operation_id)

        result = []

        for replica in replica_list:
            # Function spec is to return clones (so that if specific block fails to copy, we can return a dataset replica without the block)
            clone_replica = DatasetReplica(replica.dataset, replica.site)
            clone_replica.copy(replica)

            result.append(clone_replica)

            for block_replica in replica.block_replicas:
                LOG.debug('Subscribing files for %s', str(block_replica))

                if block_replica.file_ids is None:
                    LOG.debug('No file to subscribe for %s', str(block_replica))
                    return

                all_files = block_replica.block.files
                missing_files = all_files - block_replica.files()

                for lfile in missing_files:
                    self.rlfsm.subscribe_file(block_replica.site, lfile)

                clone_block_replica = BlockReplica(block_replica.block, block_replica.site, block_replica.group)
                clone_block_replica.copy(block_replica)
                clone_block_replica.last_update = int(time.time())
                clone_replica.block_replicas.add(clone_block_replica)

        if not self._read_only:
            for clone_replica in result:
                if clone_replica.growing:
                    self.mysql.query('INSERT INTO `phedex_transfer_reservations` (`operation_id`, `item`, `site`, `group`) VALUES (%s, %s, %s, %s)', operation_id, clone_replica.dataset.name, clone_replica.site.name, clone_replica.group.name)
                else:
                    for block_replica in clone_replica.block_replicas:
                        self.mysql.query('INSERT INTO `phedex_transfer_reservations` (`operation_id`, `item`, `site`, `group`) VALUES (%s, %s, %s, %s)', operation_id, block_replica.block.full_name(), clone_replica.site.name, block_replica.group.name)

        # no external dependency - everything is a success
        return result
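
# ---------------------------------------------------------------------------
# Example (not part of the module): schedule_copies above records each growing
# dataset replica, or each individual block otherwise, in the
# `phedex_transfer_reservations` table. Below is a hedged sketch of a table
# definition that would accept those INSERT statements; the column types,
# sizes, and keys are assumptions, not the deployed schema.

CREATE_PHEDEX_TRANSFER_RESERVATIONS = (
    'CREATE TABLE IF NOT EXISTS `phedex_transfer_reservations` ('
    ' `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,'  # assumed surrogate key
    ' `operation_id` INT UNSIGNED NOT NULL,'
    ' `item` VARCHAR(512) NOT NULL,'               # dataset name or block full name
    ' `site` VARCHAR(64) NOT NULL,'
    ' `group` VARCHAR(64) NOT NULL,'
    ' PRIMARY KEY (`id`),'
    ' KEY `operation` (`operation_id`)'
    ')'
)
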
class RLFSMDeletionInterface(DeletionInterface):
    """
    DeletionInterface using the Dynamo RLFSM.
    """

    def __init__(self, config = None):
        DeletionInterface.__init__(self, config)

        self.rlfsm = RLFSM(config.get('rlfsm', None))

    def set_read_only(self, value = True): #override
        self._read_only = value
        self.rlfsm.set_read_only(value)

    def schedule_deletions(self, replica_list, operation_id, comments = ''): #override
        sites = set(r.site for r, b in replica_list)
        if len(sites) != 1:
            raise OperationalError('schedule_deletions should be called with a list of replicas at a single site.')

        site = list(sites)[0]

        LOG.info('Scheduling deletion of %d replicas from %s using RLFSM (operation %d)', len(replica_list), site.name, operation_id)

        clones = []

        for dataset_replica, block_replicas in replica_list:
            if block_replicas is None:
                to_delete = dataset_replica.block_replicas
            else:
                to_delete = block_replicas

            for block_replica in to_delete:
                for lfile in block_replica.files():
                    self.rlfsm.desubscribe_file(block_replica.site, lfile)

            # No external dependency -> all operations are successful

            clone_replica = DatasetReplica(dataset_replica.dataset, dataset_replica.site)
            clone_replica.copy(dataset_replica)

            if block_replicas is None:
                clones.append((clone_replica, None))
            else:
                clones.append((clone_replica, []))
                for block_replica in block_replicas:
                    clone_block_replica = BlockReplica(block_replica.block, block_replica.site)
                    clone_block_replica.copy(block_replica)
                    clone_block_replica.last_update = int(time.time())
                    clones[-1][1].append(clone_block_replica)

        return clones

    def deletion_status(self, operation_id): #override
        raise NotImplementedError('deletion_status')
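
# ---------------------------------------------------------------------------
# Example (not part of the module): a standalone sketch of the input convention
# schedule_deletions consumes, i.e. a list of (dataset_replica, block_replicas)
# pairs, where block_replicas is None to delete the whole replica or an explicit
# list to delete only a subset. The namedtuple stand-ins below are illustrative
# placeholders, not the Dynamo dataformat classes.

from collections import namedtuple

_Replica = namedtuple('_Replica', ['dataset', 'site', 'block_replicas'])
_BlockReplica = namedtuple('_BlockReplica', ['block', 'site'])

_full = _Replica('datasetA', 'SITE_X',
                 [_BlockReplica('datasetA#block1', 'SITE_X'), _BlockReplica('datasetA#block2', 'SITE_X')])
_partial = _Replica('datasetB', 'SITE_X', [_BlockReplica('datasetB#block1', 'SITE_X')])

_replica_list = [
    (_full, None),                        # delete every block replica of datasetA
    (_partial, _partial.block_replicas)   # delete only the listed block replicas of datasetB
]

for _dataset_replica, _block_replicas in _replica_list:
    # Mirrors the selection logic in schedule_deletions above.
    _to_delete = _dataset_replica.block_replicas if _block_replicas is None else _block_replicas
    print(_dataset_replica.dataset, '->', [b.block for b in _to_delete])
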
class RLFSMCopyInterface(CopyInterface):
    """
    CopyInterface using the Dynamo RLFSM.
    """

    def __init__(self, config = None):
        CopyInterface.__init__(self, config)

        self.rlfsm = RLFSM(config.get('rlfsm', None))

    def set_read_only(self, value = True): #override
        self._read_only = value
        self.rlfsm.set_read_only(value)

    def schedule_copies(self, replica_list, operation_id, comments = ''): #override
        sites = set(r.site for r in replica_list)
        if len(sites) != 1:
            raise OperationalError('schedule_copies should be called with a list of replicas at a single site.')

        LOG.info('Scheduling copy of %d replicas to %s using RLFSM (operation %d)', len(replica_list), list(sites)[0], operation_id)

        result = []

        for replica in replica_list:
            # Function spec is to return clones (so that if specific block fails to copy, we can return a dataset replica without the block)
            clone_replica = DatasetReplica(replica.dataset, replica.site)
            clone_replica.copy(replica)

            result.append(clone_replica)

            for block_replica in replica.block_replicas:
                LOG.debug('Subscribing files for %s', str(block_replica))

                if block_replica.file_ids is None:
                    LOG.debug('No file to subscribe for %s', str(block_replica))
                    return

                all_files = block_replica.block.files
                missing_files = all_files - block_replica.files()

                for lfile in missing_files:
                    self.rlfsm.subscribe_file(block_replica.site, lfile)

                clone_block_replica = BlockReplica(block_replica.block, block_replica.site, block_replica.group)
                clone_block_replica.copy(block_replica)
                clone_block_replica.last_update = int(time.time())
                clone_replica.block_replicas.add(clone_block_replica)

        # no external dependency - everything is a success
        return result
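
# ---------------------------------------------------------------------------
# Example (not part of the module): the per-block subscription step above keys
# off a set difference between the block's full file list and the files already
# present in the destination block replica. A standalone sketch with
# hypothetical LFNs:

_all_files = {'/store/example/a.root', '/store/example/b.root', '/store/example/c.root'}
_replica_files = {'/store/example/a.root'}  # already at the destination

_missing_files = _all_files - _replica_files  # same set difference as in schedule_copies

for _lfn in sorted(_missing_files):
    # Stands in for rlfsm.subscribe_file(block_replica.site, lfile).
    print('would subscribe', _lfn)
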
parser.add_argument('--id', '-i', metavar = 'ID', dest = 'ids', nargs = '+', type = int, help = 'Subscription ids.')

args = parser.parse_args()
sys.argv = []

if args.site is None and args.reason is None and args.ids is None:
    sys.stderr.write('Cannot release all subscriptions.\n')
    sys.exit(1)

from dynamo.core.executable import inventory, authorized
from dynamo.fileop.rlfsm import RLFSM

rlfsm = RLFSM()
if not authorized:
    rlfsm.set_read_only()

subscriptions = rlfsm.get_subscriptions(inventory, op = 'transfer', status = ['held'])

num_released = 0

for subscription in subscriptions:
    if args.ids is not None and subscription.id not in args.ids:
        continue

    if args.site is not None and subscription.destination.name != args.site:
        continue

    if args.reason is not None and subscription.hold_reason != args.reason:
        continue