def option_reset(self, request):
    """
    Fetch and overwrite values from subcampaign
    """
    prepid = request.get_prepid()
    request_db = Database('requests')
    with self.locker.get_nonblocking_lock(prepid):
        # Re-fetch under the lock so we reset the latest version
        request = Request(json_input=request_db.get(prepid))
        if request.get('status') != 'new':
            raise Exception('It is not allowed to option reset '
                            'requests that are not in status "new"')

        subcampaign_name = request.get('subcampaign')
        subcampaign_json = Database('subcampaigns').get(subcampaign_name)
        if not subcampaign_json:
            raise Exception(f'Subcampaign "{subcampaign_name}" does not exist')

        subcampaign = Subcampaign(json_input=subcampaign_json)
        # Overwrite the resettable attributes with subcampaign values
        for attribute in ('memory', 'sequences', 'energy', 'cmssw_release'):
            request.set(attribute, subcampaign.get(attribute))

        request_db.save(request.get_json())

    return request
def after_update(self, old_obj, new_obj, changed_values):
    """
    Hook executed after an update; if the workflow name changed,
    create a renamed copy of the relval, point all tickets that
    referenced the old prepid at the new one and delete the old relval.
    """
    self.logger.info('Changed values: %s', changed_values)
    if 'workflow_name' not in changed_values:
        return

    # A new workflow name means a new prepid - create a fresh relval
    # carrying over the old history
    new_relval = self.create(new_obj.get_json())
    old_prepid = old_obj.get_prepid()
    new_prepid = new_relval.get_prepid()
    new_relval.set('history', old_obj.get('history'))
    new_relval.add_history('rename', [old_prepid, new_prepid], None)
    Database('relvals').save(new_relval.get_json())
    self.logger.info('Created %s as rename of %s', new_prepid, old_prepid)
    new_obj.set('prepid', new_prepid)

    # Update the ticket...
    tickets_db = Database('tickets')
    found_tickets = tickets_db.query(f'created_relvals={old_obj.get_prepid()}')
    self.logger.debug(json.dumps(found_tickets, indent=2))
    for found_ticket in found_tickets:
        ticket_prepid = found_ticket['prepid']
        with self.locker.get_lock(ticket_prepid):
            # Re-fetch the ticket under the lock before mutating it
            ticket = Ticket(json_input=tickets_db.get(ticket_prepid))
            created_relvals = ticket.get('created_relvals')
            if old_prepid in created_relvals:
                created_relvals.remove(old_prepid)
                created_relvals.append(new_prepid)

            ticket.set('created_relvals', created_relvals)
            ticket.add_history('rename', [old_prepid, new_prepid], None)
            tickets_db.save(ticket.get_json())

    self.delete(old_obj.get_json())
def after_delete(self, obj):
    """
    Hook executed after a delete; scrub the deleted prepid from the
    'created_relvals' list of every ticket that referenced it.
    """
    prepid = obj.get_prepid()
    tickets_db = Database('tickets')
    found_tickets = tickets_db.query(f'created_relvals={prepid}')
    self.logger.debug(json.dumps(found_tickets, indent=2))
    for found_ticket in found_tickets:
        ticket_prepid = found_ticket['prepid']
        with self.locker.get_lock(ticket_prepid):
            # Re-fetch the ticket under the lock before mutating it
            ticket = Ticket(json_input=tickets_db.get(ticket_prepid))
            created_relvals = ticket.get('created_relvals')
            if prepid in created_relvals:
                created_relvals.remove(prepid)

            ticket.set('created_relvals', created_relvals)
            ticket.add_history('remove_relval', prepid, None)
            tickets_db.save(ticket.get_json())
def get(self):
    """
    Perform a search
    """
    args = flask.request.args.to_dict()
    if args is None:
        args = {}

    db_name = args.pop('db_name', None)
    page = int(args.pop('page', 0))
    limit = int(args.pop('limit', 20))
    sort = args.pop('sort', None)
    sort_asc = args.pop('sort_asc', 'true').lower() == 'true'
    # Special cases
    from_ticket = args.pop('ticket', None)
    if db_name == 'requests' and from_ticket:
        # Restrict the prepid filter to the requests created by the ticket
        ticket = Database('tickets').get(from_ticket)
        created_requests = ','.join(ticket['created_requests'])
        prepid_query = args.pop('prepid', '')
        args['prepid'] = ('%s,%s' % (prepid_query, created_requests)).strip(',')

    # Clamp the page size to a sane range
    limit = min(limit, 500)
    limit = max(limit, 1)
    query_string = '&&'.join('%s=%s' % (attr, value) for attr, value in args.items())
    database = Database(db_name)
    query_string = database.build_query_with_types(query_string,
                                                   self.classes[db_name])
    results, total_rows = database.query_with_total_rows(query_string=query_string,
                                                         page=page,
                                                         limit=limit,
                                                         sort_attr=sort,
                                                         sort_asc=sort_asc,
                                                         ignore_case=True)
    return self.output_text({'response': {'results': results,
                                          'total_rows': total_rows},
                             'success': True,
                             'message': ''})
def change_request_priority(self, request, priority):
    """
    Change priority of a request and of its active workflows in ReqMgr2.

    Only requests in status "submitted" may have their priority changed.
    Saves the updated request and returns it.
    """
    prepid = request.get_prepid()
    request_db = Database('requests')
    cmsweb_url = Config.get('cmsweb_url')
    grid_cert = Config.get('grid_user_cert')
    grid_key = Config.get('grid_user_key')
    self.logger.info('Will try to change %s priority to %s', prepid, priority)
    with self.locker.get_nonblocking_lock(prepid):
        # Re-fetch under the lock so we change the latest version
        request_json = request_db.get(prepid)
        request = Request(json_input=request_json)
        if request.get('status') != 'submitted':
            raise Exception('It is not allowed to change priority of '
                            'requests that are not in status "submitted"')

        request.set('priority', priority)
        updated_workflows = []
        active_workflows = self.__pick_active_workflows(request)
        connection = ConnectionWrapper(host=cmsweb_url,
                                       keep_open=True,
                                       cert_file=grid_cert,
                                       key_file=grid_key)
        # BUGFIX: close the connection even if a ReqMgr2 call raises,
        # otherwise the kept-open connection is leaked
        try:
            for workflow in active_workflows:
                workflow_name = workflow['name']
                self.logger.info('Changing "%s" priority to %s',
                                 workflow_name,
                                 priority)
                response = connection.api('PUT',
                                          f'/reqmgr2/data/request/{workflow_name}',
                                          {'RequestPriority': priority})
                updated_workflows.append(workflow_name)
                self.logger.debug(response)
        finally:
            connection.close()

        # Update priority in Stats2
        self.force_stats_to_refresh(updated_workflows)
        # Finally save the request
        request_db.save(request.get_json())

    return request
def update_workflows(self, request):
    """
    Update computing workflows from Stats2.

    Merges workflows already attached to the request with workflows that
    Stats2 knows for this prepid, refreshes priority/total events/output
    datasets from the newest workflow, saves the request and triggers an
    input-dataset update for subsequent requests chained on this one.
    """
    prepid = request.get_prepid()
    request_db = Database('requests')
    with self.locker.get_lock(prepid):
        # Re-fetch under the lock so we update the latest version
        request_json = request_db.get(prepid)
        request = Request(json_input=request_json)
        stats_conn = ConnectionWrapper(host='vocms074.cern.ch',
                                       port=5984,
                                       https=False,
                                       keep_open=True)
        # BUGFIX: close the Stats2 connection even if a fetch below raises
        # (the loop intentionally raises on missing workflows), otherwise
        # the kept-open connection is leaked
        try:
            stats_workflows = stats_conn.api(
                'GET',
                f'/requests/_design/_designDoc/_view/prepids?key="{prepid}"&include_docs=True')
            stats_workflows = json.loads(stats_workflows)
            stats_workflows = [x['doc'] for x in stats_workflows['rows']]
            existing_workflows = [x['name'] for x in request.get('workflows')]
            stats_workflows = [x['RequestName'] for x in stats_workflows]
            all_workflow_names = list(set(existing_workflows) | set(stats_workflows))
            self.logger.info('All workflows of %s are %s',
                             prepid,
                             ', '.join(all_workflow_names))
            all_workflows = {}
            for workflow_name in all_workflow_names:
                workflow = stats_conn.api('GET', f'/requests/{workflow_name}')
                if not workflow:
                    raise Exception(f'Could not find {workflow_name} in Stats2')

                workflow = json.loads(workflow)
                if not workflow.get('RequestName'):
                    raise Exception(f'Could not find {workflow_name} in Stats2')

                # Resubmissions are not tracked as separate workflows
                if workflow.get('RequestType', '').lower() == 'resubmission':
                    continue

                all_workflows[workflow_name] = workflow
                self.logger.info('Fetched workflow %s', workflow_name)
        finally:
            stats_conn.close()

        output_datasets = self.__get_output_datasets(request, all_workflows)
        new_workflows = self.__pick_workflows(all_workflows, output_datasets)
        all_workflow_names = [x['name'] for x in new_workflows]
        # Take completed events from the newest workflow that has the
        # final output dataset
        for new_workflow in reversed(new_workflows):
            completed_events = -1
            for output_dataset in new_workflow.get('output_datasets', []):
                if output_datasets and output_dataset['name'] == output_datasets[-1]:
                    completed_events = output_dataset['events']
                    break

            if completed_events != -1:
                request.set('completed_events', completed_events)
                break

        if all_workflow_names:
            newest_workflow = all_workflows[all_workflow_names[-1]]
            if 'RequestPriority' in newest_workflow:
                request.set('priority', newest_workflow['RequestPriority'])

            if 'TotalEvents' in newest_workflow:
                request.set('total_events', max(0, newest_workflow['TotalEvents']))

        request.set('output_datasets', output_datasets)
        request.set('workflows', new_workflows)
        request_db.save(request.get_json())

    # Propagate the new output datasets to requests chained on this one
    if output_datasets:
        subsequent_requests = request_db.query(f'input.request={prepid}')
        self.logger.info('Found %s subsequent requests for %s: %s',
                         len(subsequent_requests),
                         prepid,
                         [r['prepid'] for r in subsequent_requests])
        for subsequent_request_json in subsequent_requests:
            subsequent_request_prepid = subsequent_request_json.get('prepid')
            self.update_input_dataset(self.get(subsequent_request_prepid))

    return request
def create(self, json_data):
    """
    Create a new ReReco request from given JSON.

    Values missing in the JSON (sequences, memory, energy) are inherited
    from the subcampaign. The prepid is built from era, dataset and
    processing string, which come either from the input dataset name or
    from the input request - exactly one of the two must be given.
    """
    # Get a subcampaign
    subcampaign_db = Database('subcampaigns')
    subcampaign_name = json_data.get('subcampaign')
    subcampaign_json = subcampaign_db.get(subcampaign_name)
    if not subcampaign_json:
        raise Exception(f'Subcampaign "{subcampaign_name}" does not exist')

    request_db = Database(self.database_name)
    subcampaign = Subcampaign(json_input=subcampaign_json)
    json_data['cmssw_release'] = subcampaign.get('cmssw_release')
    json_data['subcampaign'] = subcampaign.get_prepid()
    json_data['prepid'] = 'PlaceholderPrepID'
    new_request = Request(json_input=json_data)
    # Fall back to subcampaign values for attributes not given in JSON
    for inherited in ('sequences', 'memory', 'energy'):
        if not json_data.get(inherited):
            new_request.set(inherited, subcampaign.get(inherited))

    request_input = new_request.get('input')
    input_dataset = request_input.get('dataset')
    input_request_prepid = request_input.get('request')
    # Prepid is made of era, dataset and processing string
    # Either they are taken from input dataset or input request
    # Only one must be provided
    self.logger.info(request_input)
    if input_dataset and input_request_prepid:
        raise Exception('Request cannot have both input request and input dataset, only one')

    if input_dataset:
        # /<dataset>/<era>-<...>/<tier> -> era is the first dash-separated
        # token of the second path component
        dataset_parts = [part for part in input_dataset.split('/') if part]
        era = dataset_parts[1].split('-')[0]
        dataset = dataset_parts[0]
    elif input_request_prepid:
        input_request_json = request_db.get(input_request_prepid)
        if not input_request_json:
            raise Exception(f'Request "{input_request_prepid}" does not exist')

        input_request = Request(json_input=input_request_json)
        era = input_request.get_era()
        dataset = input_request.get_dataset()
    else:
        raise Exception('Request must have either a input request or input dataset')

    processing_string = new_request.get('processing_string')
    prepid_middle_part = f'{era}-{dataset}-{processing_string}'
    # Serialize prepid generation so two concurrent creates cannot
    # pick the same serial number
    with self.locker.get_lock(f'create-request-prepid-{prepid_middle_part}'):
        # Get a new serial number
        serial_number = self.get_highest_serial_number(request_db,
                                                       f'ReReco-{prepid_middle_part}-*')
        serial_number += 1
        prepid = f'ReReco-{prepid_middle_part}-{serial_number:05d}'
        new_request.set('prepid', prepid)
        new_request_json = super().create(new_request.get_json())

    return new_request_json
def create_requests_for_ticket(self, ticket):
    """
    Create requests from given ticket. Return list of request prepids

    For every input dataset of the ticket, one request is created per
    ticket step; the first step takes the input dataset, every later
    step takes the previously created request as input. If anything
    fails mid-way, all requests created so far are deleted and the
    exception is re-raised, leaving the ticket unchanged.
    """
    database = Database(self.database_name)
    ticket_prepid = ticket.get_prepid()
    created_requests = []
    dataset_blacklist = set(Settings().get('dataset_blacklist'))
    request_controller = RequestController()
    with self.locker.get_lock(ticket_prepid):
        # Re-fetch the ticket under the lock before working with it
        ticket = Ticket(json_input=database.get(ticket_prepid))
        created_requests = ticket.get('created_requests')
        status = ticket.get('status')
        if status != 'new':
            raise Exception(f'Ticket is not new, it already has '
                            f'{len(created_requests)} requests created')

        # In case black list was updated after ticket was created
        for input_dataset in ticket.get('input_datasets'):
            # Dataset name is the first path component of /<dataset>/...
            dataset = input_dataset.split('/')[1]
            if dataset in dataset_blacklist:
                raise Exception(f'Input dataset {input_dataset} is not '
                                f'allowed because {dataset} is in blacklist')

        try:
            for input_dataset in ticket.get('input_datasets'):
                last_request_prepid = None
                for step_index, step in enumerate(ticket.get('steps')):
                    subcampaign_name = step['subcampaign']
                    processing_string = step['processing_string']
                    time_per_event = step['time_per_event']
                    size_per_event = step['size_per_event']
                    priority = step['priority']
                    new_request_json = {'subcampaign': subcampaign_name,
                                        'priority': priority,
                                        'processing_string': processing_string,
                                        'time_per_event': time_per_event,
                                        'size_per_event': size_per_event,
                                        'input': {'dataset': '',
                                                  'request': ''}}
                    # First step reads the input dataset, later steps are
                    # chained on the request created for the previous step
                    if step_index == 0:
                        new_request_json['input']['dataset'] = input_dataset
                    else:
                        new_request_json['input']['request'] = last_request_prepid

                    # Best-effort: runs/lumisections are optional, the
                    # request is still created if their lookup fails
                    try:
                        runs = request_controller.get_runs(subcampaign_name,
                                                           input_dataset)
                        new_request_json['runs'] = runs
                        lumis = request_controller.get_lumisections(subcampaign_name,
                                                                    runs)
                        new_request_json['lumisections'] = lumis
                    except Exception as ex:
                        self.logger.error('Error getting runs or lumis for %s %s %s: \n%s',
                                          subcampaign_name,
                                          input_dataset,
                                          processing_string,
                                          ex)

                    request = request_controller.create(new_request_json)
                    created_requests.append(request)
                    last_request_prepid = request.get('prepid')
                    self.logger.info('Created %s', last_request_prepid)

            created_request_prepids = [r.get('prepid') for r in created_requests]
            ticket.set('created_requests', created_request_prepids)
            ticket.set('status', 'done')
            ticket.add_history('create_requests', created_request_prepids, None)
            database.save(ticket.get_json())
        except Exception as ex:
            # Delete created requests if there was an Exception
            # (reversed so chained requests are removed before their inputs)
            for created_request in reversed(created_requests):
                request_controller.delete({'prepid': created_request.get('prepid')})

            # And reraise the exception
            raise ex

    return [r.get('prepid') for r in created_requests]