Example #1
    def update_subsequent_requests(self, request, values):
        """
        Update all subsequent requests
        """
        request_db = Database('requests')
        prepid = request.get_prepid()
        query = f'input.request={prepid}'
        requests = request_db.query(query)
        self.logger.info('Found %s subsequent requests for %s: %s',
                         len(requests), prepid,
                         [r['prepid'] for r in requests])
        for request_json in requests:
            if request_json.get('status') not in ('new', 'approved'):
                continue

            request_prepid = request_json.get('prepid', '')
            try:
                subsequent_request = self.get(request_prepid)
                for key, value in values.items():
                    subsequent_request.set(key, value)

                self.update(subsequent_request.get_json())
            except Exception as ex:
                self.logger.error('Error updating subsequent request %s: %s',
                                  request_prepid, ex)
Example #2
def move_to_done(database_auth, port):
    """
    Try to move all submitted RelVals to next status
    """
    Database.set_database_name('relval')
    Database.set_credentials_file(database_auth)

    connection = http.client.HTTPConnection('localhost',
                                            port=port,
                                            timeout=300)
    headers = {
        'Content-Type': 'application/json',
        'Adfs-Login': '******',
        'Adfs-Group': 'cms-pdmv-serv'
    }
    relval_db = Database('relvals')
    relvals = [{}]
    page = 0
    while relvals:
        relvals = relval_db.query(query_string='status=submitted', page=page)
        page += 1
        for relval in relvals:
            print(relval['prepid'])
            connection.request('POST',
                               '/api/relvals/next_status',
                               json.dumps(relval),
                               headers=headers)
            response = connection.getresponse()
            response_text = json.loads(response.read())['message']
            print('  %s %s' % (response.status, response_text))
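
The page-by-page while loop above is a pattern that recurs in these snippets: keep calling Database.query with an increasing page number until an empty list comes back. Below is a minimal sketch that factors that loop into a reusable generator; the function name iterate_query is illustrative, and only the Database('relvals') constructor and the query(query_string=..., page=...) signature visible in Example #2 are assumed.

def iterate_query(database, query_string):
    """Yield documents page by page until a query returns no results."""
    page = 0
    while True:
        results = database.query(query_string=query_string, page=page)
        if not results:
            break

        for document in results:
            yield document

        page += 1


# Hypothetical usage mirroring Example #2: walk over all submitted RelVals.
# for relval in iterate_query(Database('relvals'), 'status=submitted'):
#     print(relval['prepid'])
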
Example #3
    def submit_subsequent_requests(self, request):
        """
        Submit all requests that have given request as input
        """
        request_db = Database('requests')
        prepid = request.get_prepid()
        query = f'input.request={prepid}'
        subsequent_requests = request_db.query(query)
        self.logger.info('Found %s subsequent requests for %s: %s',
                         len(subsequent_requests), prepid,
                         [r['prepid'] for r in subsequent_requests])
        for subsequent_request_json in subsequent_requests:
            subsequent_request_prepid = subsequent_request_json.get(
                'prepid', '')
            try:
                subsequent_request = self.get(subsequent_request_prepid)
                self.update_input_dataset(subsequent_request)
                if subsequent_request.get('status') == 'new':
                    self.next_status(subsequent_request)

                if subsequent_request.get('status') == 'approved':
                    self.next_status(subsequent_request)

            except Exception as ex:
                self.logger.error('Error moving %s to next status: %s',
                                  subsequent_request_prepid, ex)
Example #4
    def after_update(self, old_obj, new_obj, changed_values):
        self.logger.info('Changed values: %s', changed_values)
        if 'workflow_name' in changed_values:
            new_relval = self.create(new_obj.get_json())
            old_prepid = old_obj.get_prepid()
            new_prepid = new_relval.get_prepid()
            new_relval.set('history', old_obj.get('history'))
            new_relval.add_history('rename', [old_prepid, new_prepid], None)
            relvals_db = Database('relvals')
            relvals_db.save(new_relval.get_json())
            self.logger.info('Created %s as rename of %s', new_prepid,
                             old_prepid)
            new_obj.set('prepid', new_prepid)
            # Update the ticket...
            tickets_db = Database('tickets')
            tickets = tickets_db.query(
                f'created_relvals={old_obj.get_prepid()}')
            self.logger.debug(json.dumps(tickets, indent=2))
            for ticket_json in tickets:
                ticket_prepid = ticket_json['prepid']
                with self.locker.get_lock(ticket_prepid):
                    ticket_json = tickets_db.get(ticket_prepid)
                    ticket = Ticket(json_input=ticket_json)
                    created_relvals = ticket.get('created_relvals')
                    if old_prepid in created_relvals:
                        created_relvals.remove(old_prepid)

                    created_relvals.append(new_prepid)
                    ticket.set('created_relvals', created_relvals)
                    ticket.add_history('rename', [old_prepid, new_prepid],
                                       None)
                    tickets_db.save(ticket.get_json())

            self.delete(old_obj.get_json())
Example #5
    def after_delete(self, obj):
        prepid = obj.get_prepid()
        tickets_db = Database('tickets')
        tickets = tickets_db.query(f'created_relvals={prepid}')
        self.logger.debug(json.dumps(tickets, indent=2))
        for ticket_json in tickets:
            ticket_prepid = ticket_json['prepid']
            with self.locker.get_lock(ticket_prepid):
                ticket_json = tickets_db.get(ticket_prepid)
                ticket = Ticket(json_input=ticket_json)
                created_relvals = ticket.get('created_relvals')
                if prepid in created_relvals:
                    created_relvals.remove(prepid)

                ticket.set('created_relvals', created_relvals)
                ticket.add_history('remove_relval', prepid, None)
                tickets_db.save(ticket.get_json())
Example #6
    def check_for_delete(self, obj):
        if obj.get('status') != 'new':
            raise Exception(
                'Request must be in status "new" before it is deleted')

        requests_db = Database('requests')
        prepid = obj.get_prepid()
        subsequent_requests_query = f'input.request={prepid}'
        subsequent_requests = requests_db.query(subsequent_requests_query)
        if subsequent_requests:
            subsequent_requests_prepids = ', '.join(
                [r['prepid'] for r in subsequent_requests])
            raise Exception(
                'Request cannot be deleted because it is an input request '
                f'for {subsequent_requests_prepids}. Delete these requests first'
            )

        return True
Example #7
"""
Script to add run list to relval steps
"""
import sys
import os.path
import os
sys.path.append(os.path.abspath(os.path.pardir))
from core_lib.database.database import Database

Database.set_credentials_file(os.getenv('DB_AUTH'))
Database.set_database_name('relval')

relvals_database = Database('relvals')

total_relvals = relvals_database.get_count()

print('Total relvals: %s' % (total_relvals))

for index, item in enumerate(relvals_database.query(limit=total_relvals)):
    print('Processing entry %s/%s %s' %
          (index + 1, total_relvals, item.get('prepid', '<no-id>')))
    for step in item['steps']:
        step['input']['run'] = step['input'].get('run', [])

    relvals_database.save(item)

print('Done')
Example #8
request_db = Database('requests')
subcampaign_db = Database('subcampaigns')
old_ticket_db = Database('subcampaign_tickets')
new_ticket_db = Database('tickets')

total_subcampaigns = subcampaign_db.get_count()
total_requests = request_db.get_count()
total_old_tickets = old_ticket_db.get_count()
total_new_tickets = new_ticket_db.get_count()

print('Requests: %s' % (total_requests))
print('Subcampaigns: %s' % (total_subcampaigns))
print('(Old) subcampaign tickets: %s' % (total_old_tickets))
print('(New) tickets: %s' % (total_new_tickets))

for index, subcampaign in enumerate(
        subcampaign_db.query(limit=total_subcampaigns)):
    print('Processing subcampaign %s/%s %s' %
          (index + 1, total_subcampaigns, subcampaign['prepid']))
    subcampaign.pop('_rev', None)
    subcampaign.pop('step', None)
    subcampaign_db.save(subcampaign)

for index, request in enumerate(request_db.query(limit=total_requests)):
    print('Processing request %s/%s %s' %
          (index + 1, total_requests, request['prepid']))
    request.pop('_rev', None)
    request.pop('step', None)
    if 'input_dataset' in request:
        request['input'] = {
            'dataset': request.pop('input_dataset'),
            'request': ''
        }

    request_db.save(request)
Example #9
sys.path.append(os.path.abspath(os.path.pardir))
from core_lib.database.database import Database

Database.set_credentials_file(os.getenv('DB_AUTH'))
Database.set_database_name('rereco')

request_db = Database('requests')
subcampaign_db = Database('subcampaigns')

total_subcampaigns = subcampaign_db.get_count()
total_requests = request_db.get_count()

print('Requests: %s' % (total_requests))
print('Subcampaigns: %s' % (total_subcampaigns))

for index, subcampaign in enumerate(subcampaign_db.query(limit=total_subcampaigns)):
    print('Processing subcampaign %s/%s %s' % (index + 1,
                                               total_subcampaigns,
                                               subcampaign['prepid']))
    subcampaign.pop('scram_arch', None)
    subcampaign_db.save(subcampaign)

for index, request in enumerate(request_db.query(limit=total_requests)):
    print('Processing request %s/%s %s' % (index + 1, total_requests, request['prepid']))
    request.pop('scram_arch', None)
    request_db.save(request)


total_subcampaigns = subcampaign_db.get_count()
total_requests = request_db.get_count()
Example #10
    def get(self):
        """
        Perform a search
        """
        args = flask.request.args.to_dict()
        if args is None:
            args = {}

        db_name = args.pop('db_name', None)
        page = int(args.pop('page', 0))
        limit = int(args.pop('limit', 20))
        sort = args.pop('sort', None)
        sort_asc = args.pop('sort_asc', None)

        # Special cases
        from_ticket = args.pop('ticket', None)
        if db_name == 'relvals' and from_ticket:
            ticket_database = Database('tickets')
            tickets = ticket_database.query(
                query_string=f'prepid={from_ticket}',
                limit=100,
                ignore_case=True)
            created_relvals = []
            for ticket in tickets:
                created_relvals.extend(ticket['created_relvals'])

            created_relvals = ','.join(created_relvals)
            prepid_query = args.pop('prepid', '')
            args['prepid'] = ('%s,%s' %
                              (prepid_query, created_relvals)).strip(',')

        # Sorting logic: by default sort descending by creation time
        if sort is None:
            sort = 'created_on'

        if sort == 'created_on' and sort_asc is None:
            sort_asc = False

        if sort_asc is None:
            sort_asc = True

        limit = max(1, min(limit, 500))
        sort_asc = str(sort_asc).lower() == 'true'
        query_string = '&&'.join(['%s=%s' % (pair) for pair in args.items()])
        database = Database(db_name)
        query_string = database.build_query_with_types(query_string,
                                                       self.classes[db_name])
        results, total_rows = database.query_with_total_rows(
            query_string=query_string,
            page=page,
            limit=limit,
            sort_attr=sort,
            sort_asc=sort_asc,
            ignore_case=True)

        return self.output_text({
            'response': {
                'results': results,
                'total_rows': total_rows
            },
            'success': True,
            'message': ''
        })
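
The search handler above turns the leftover URL parameters into a single '&&'-joined query string before handing it to Database.build_query_with_types and query_with_total_rows. Below is a standalone sketch of just that translation step; the helper name build_query_string and the sample prepid/status values are illustrative and not taken from the API.

def build_query_string(args):
    """Join leftover key=value pairs the same way the search handler does."""
    return '&&'.join('%s=%s' % pair for pair in args.items())


# Hypothetical input: what remains after db_name, page, limit, sort and
# sort_asc have been popped from the request arguments.
remaining_args = {'prepid': 'ReReco-Run2018A-DoubleMuon-*', 'status': 'submitted'}
print(build_query_string(remaining_args))
# prepid=ReReco-Run2018A-DoubleMuon-*&&status=submitted
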
Example #11
import sys
import os.path
import os
sys.path.append(os.path.abspath(os.path.pardir))
from core_lib.database.database import Database

Database.set_credentials_file(os.getenv('DB_AUTH'))
Database.set_database_name('relval')

database = Database('relvals')

total_entries = database.get_count()

print('Total entries: %s' % (total_entries))

for index, item in enumerate(database.query(limit=total_entries)):
    print('Processing entry %s/%s %s' %
          (index + 1, total_entries, item.get('prepid', '<no-id>')))
    item['job_dict_overwrite'] = {}
    database.save(item)

print('Done')
Example #12
    def update_workflows(self, request):
        """
        Update computing workflows from Stats2
        """
        prepid = request.get_prepid()
        request_db = Database('requests')
        with self.locker.get_lock(prepid):
            request_json = request_db.get(prepid)
            request = Request(json_input=request_json)
            stats_conn = ConnectionWrapper(host='vocms074.cern.ch',
                                           port=5984,
                                           https=False,
                                           keep_open=True)
            stats_workflows = stats_conn.api(
                'GET',
                f'/requests/_design/_designDoc/_view/prepids?key="{prepid}"&include_docs=True'
            )
            stats_workflows = json.loads(stats_workflows)
            stats_workflows = [x['doc'] for x in stats_workflows['rows']]
            existing_workflows = [x['name'] for x in request.get('workflows')]
            stats_workflows = [x['RequestName'] for x in stats_workflows]
            all_workflow_names = list(
                set(existing_workflows) | set(stats_workflows))
            self.logger.info('All workflows of %s are %s', prepid,
                             ', '.join(all_workflow_names))
            all_workflows = {}
            for workflow_name in all_workflow_names:
                workflow = stats_conn.api('GET', f'/requests/{workflow_name}')
                if not workflow:
                    raise Exception(
                        f'Could not find {workflow_name} in Stats2')

                workflow = json.loads(workflow)
                if not workflow.get('RequestName'):
                    raise Exception(
                        f'Could not find {workflow_name} in Stats2')

                if workflow.get('RequestType', '').lower() == 'resubmission':
                    continue

                all_workflows[workflow_name] = workflow
                self.logger.info('Fetched workflow %s', workflow_name)

            stats_conn.close()
            output_datasets = self.__get_output_datasets(
                request, all_workflows)
            new_workflows = self.__pick_workflows(all_workflows,
                                                  output_datasets)
            all_workflow_names = [x['name'] for x in new_workflows]
            for new_workflow in reversed(new_workflows):
                completed_events = -1
                for output_dataset in new_workflow.get('output_datasets', []):
                    if output_datasets and output_dataset[
                            'name'] == output_datasets[-1]:
                        completed_events = output_dataset['events']
                        break

                if completed_events != -1:
                    request.set('completed_events', completed_events)
                    break

            if all_workflow_names:
                newest_workflow = all_workflows[all_workflow_names[-1]]
                if 'RequestPriority' in newest_workflow:
                    request.set('priority', newest_workflow['RequestPriority'])

                if 'TotalEvents' in newest_workflow:
                    request.set('total_events',
                                max(0, newest_workflow['TotalEvents']))

            request.set('output_datasets', output_datasets)
            request.set('workflows', new_workflows)
            request_db.save(request.get_json())

            if output_datasets:
                subsequent_requests = request_db.query(
                    f'input.request={prepid}')
                self.logger.info('Found %s subsequent requests for %s: %s',
                                 len(subsequent_requests), prepid,
                                 [r['prepid'] for r in subsequent_requests])
                for subsequent_request_json in subsequent_requests:
                    subsequent_request_prepid = subsequent_request_json.get(
                        'prepid')
                    self.update_input_dataset(
                        self.get(subsequent_request_prepid))

        return request
Example #13
    def move_relvals_to_submitting(self, relvals):
        """
        Try to add RelVals to submission queue and get submitted
        """
        results = []
        dataset_access_types = self.get_dataset_access_types(relvals)
        for relval in relvals:
            prepid = relval.get_prepid()
            with self.locker.get_nonblocking_lock(prepid):
                batch_name = relval.get('batch_name')
                cmssw_release = relval.get('cmssw_release')
                relval_db = Database('relvals')
                # Make sure all datasets are VALID in DBS
                steps = relval.get('steps')
                for step in steps:
                    if step.get_step_type() == 'input_file':
                        dataset = step.get('input')['dataset']
                    elif step.get('driver')['pileup_input']:
                        dataset = step.get('driver')['pileup_input']
                    else:
                        continue

                    dataset = dataset[dataset.index('/'):]
                    access_type = dataset_access_types[dataset]
                    if access_type.lower() != 'valid':
                        raise Exception(
                            f'{dataset} type is {access_type}, it must be VALID'
                        )

                # Create or find campaign timestamp
                # Threshold in seconds
                threshold = 3600
                locker_key = f'move-relval-to-submitting-{cmssw_release}__{batch_name}'
                with self.locker.get_lock(locker_key):
                    now = int(time.time())
                    # Get RelVal with newest timestamp in this campaign (CMSSW + Batch Name)
                    db_query = f'cmssw_release={cmssw_release}&&batch_name={batch_name}'
                    relvals_with_timestamp = relval_db.query(
                        db_query,
                        limit=1,
                        sort_attr='campaign_timestamp',
                        sort_asc=False)
                    newest_timestamp = 0
                    if relvals_with_timestamp:
                        newest_timestamp = relvals_with_timestamp[0].get(
                            'campaign_timestamp', 0)

                    self.logger.info(
                        'Newest timestamp for %s__%s is %s (%s), threshold is %s',
                        cmssw_release, batch_name, newest_timestamp,
                        (newest_timestamp - now), threshold)
                    if newest_timestamp == 0 or newest_timestamp < now - threshold:
                        newest_timestamp = now

                    self.logger.info(
                        'Campaign timestamp for %s__%s will be set to %s',
                        cmssw_release, batch_name, newest_timestamp)
                    relval.set('campaign_timestamp', newest_timestamp)
                    self.update_status(relval, 'submitting')

                RequestSubmitter().add(relval, self)
                results.append(relval)

        return results
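
The campaign-timestamp decision near the end of Example #13 groups RelVals of the same CMSSW release and batch name under one timestamp as long as submissions arrive within the one-hour threshold; otherwise a new campaign timestamp is started. A standalone sketch of just that rule follows; the function name pick_campaign_timestamp and the timestamp values are made up for illustration.

def pick_campaign_timestamp(newest_timestamp, now, threshold=3600):
    """Reuse the newest campaign timestamp if it is recent enough,
    otherwise start a new campaign at the current time."""
    if newest_timestamp == 0 or newest_timestamp < now - threshold:
        return now

    return newest_timestamp


now = 1700000000                                   # hypothetical "current" time
print(pick_campaign_timestamp(0, now))             # no previous campaign -> now
print(pick_campaign_timestamp(now - 120, now))     # 2 minutes old -> reused
print(pick_campaign_timestamp(now - 7200, now))    # 2 hours old -> new campaign
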