Example #1
    def after_update(self, old_obj, new_obj, changed_values):
        self.logger.info('Changed values: %s', changed_values)
        if 'workflow_name' in changed_values:
            new_relval = self.create(new_obj.get_json())
            old_prepid = old_obj.get_prepid()
            new_prepid = new_relval.get_prepid()
            new_relval.set('history', old_obj.get('history'))
            new_relval.add_history('rename', [old_prepid, new_prepid], None)
            relvals_db = Database('relvals')
            relvals_db.save(new_relval.get_json())
            self.logger.info('Created %s as rename of %s', new_prepid,
                             old_prepid)
            new_obj.set('prepid', new_prepid)
            # Update the ticket...
            tickets_db = Database('tickets')
            tickets = tickets_db.query(f'created_relvals={old_prepid}')
            self.logger.debug(json.dumps(tickets, indent=2))
            for ticket_json in tickets:
                ticket_prepid = ticket_json['prepid']
                with self.locker.get_lock(ticket_prepid):
                    ticket_json = tickets_db.get(ticket_prepid)
                    ticket = Ticket(json_input=ticket_json)
                    created_relvals = ticket.get('created_relvals')
                    if old_prepid in created_relvals:
                        created_relvals.remove(old_prepid)

                    created_relvals.append(new_prepid)
                    ticket.set('created_relvals', created_relvals)
                    ticket.add_history('rename', [old_prepid, new_prepid],
                                       None)
                    tickets_db.save(ticket.get_json())

            self.delete(old_obj.get_json())
Example #2
    def submit_subsequent_requests(self, request):
        """
        Submit all requests that have given request as input
        """
        request_db = Database('requests')
        prepid = request.get_prepid()
        query = f'input.request={prepid}'
        subsequent_requests = request_db.query(query)
        self.logger.info('Found %s subsequent requests for %s: %s',
                         len(subsequent_requests), prepid,
                         [r['prepid'] for r in subsequent_requests])
        for subsequent_request_json in subsequent_requests:
            subsequent_request_prepid = subsequent_request_json.get(
                'prepid', '')
            try:
                subsequent_request = self.get(subsequent_request_prepid)
                self.update_input_dataset(subsequent_request)
                if subsequent_request.get('status') == 'new':
                    self.next_status(subsequent_request)

                if subsequent_request.get('status') == 'approved':
                    self.next_status(subsequent_request)

            except Exception as ex:
                self.logger.error('Error moving %s to next status: %s',
                                  subsequent_request_prepid, ex)
Example #3
    def submit_relval(self, relval, controller):
        """
        Method that is used by submission workers. This is where the actual submission happens
        """
        prepid = relval.get_prepid()
        credentials_file = Config.get('credentials_file')
        workspace_dir = Config.get('remote_path').rstrip('/')
        self.logger.debug('Will try to acquire lock for %s', prepid)
        with Locker().get_lock(prepid):
            self.logger.info('Locked %s for submission', prepid)
            relval_db = Database('relvals')
            relval = controller.get(prepid)
            try:
                self.check_for_submission(relval)
                with SSHExecutor('lxplus.cern.ch', credentials_file) as ssh:
                    # Start executing commands
                    self.prepare_workspace(relval, controller, ssh,
                                           workspace_dir)
                    # Create configs
                    self.generate_configs(relval, ssh, workspace_dir)
                    # Upload configs
                    config_hashes = self.upload_configs(
                        relval, ssh, workspace_dir)
                    # Remove remote relval directory
                    ssh.execute_command([f'rm -rf {workspace_dir}/{prepid}'])

                self.logger.debug(config_hashes)
                # Iterate through uploaded configs and save their hashes in RelVal steps
                self.update_steps_with_config_hashes(relval, config_hashes)
                # Submit job dict to ReqMgr2
                job_dict = controller.get_job_dict(relval)
                cmsweb_url = Config.get('cmsweb_url')
                grid_cert = Config.get('grid_user_cert')
                grid_key = Config.get('grid_user_key')
                connection = ConnectionWrapper(host=cmsweb_url,
                                               cert_file=grid_cert,
                                               key_file=grid_key)
                workflow_name = self.submit_job_dict(job_dict, connection)
                # Update RelVal after successful submission
                relval.set('workflows', [{'name': workflow_name}])
                relval.set('status', 'submitted')
                relval.add_history('submission', 'succeeded', 'automatic')
                relval_db.save(relval.get_json())
                time.sleep(3)
                self.approve_workflow(workflow_name, connection)
                connection.close()
                if not Config.get('development'):
                    refresh_workflows_in_stats([workflow_name])

            except Exception as ex:
                self.__handle_error(relval, str(ex))
                return

            self.__handle_success(relval)

        if not Config.get('development'):
            controller.update_workflows(relval)

        self.logger.info('Successfully finished %s submission', prepid)
Example #4
    def update_subsequent_requests(self, request, values):
        """
        Update all subsequent requests
        """
        request_db = Database('requests')
        prepid = request.get_prepid()
        query = f'input.request={prepid}'
        requests = request_db.query(query)
        self.logger.info('Found %s subsequent requests for %s: %s',
                         len(requests), prepid,
                         [r['prepid'] for r in requests])
        for request_json in requests:
            if request_json.get('status') not in ('new', 'approved'):
                continue

            request_prepid = request_json.get('prepid', '')
            try:
                subsequent_request = self.get(request_prepid)
                for key, value in values.items():
                    subsequent_request.set(key, value)

                self.update(subsequent_request.get_json())
            except Exception as ex:
                self.logger.error('Error updating subsequent request %s: %s',
                                  request_prepid, ex)
Example #5
    def get_relvals(self):
        """
        Return summary of RelVals by status and submitted RelVals by CMSSW and batch name
        """
        start_time = time.time()
        collection = Database('relvals').collection
        status_query = [{'$match': {'deleted': {'$ne': True}}},
                        {'$group': {'_id': '$status', 'count': {'$sum': 1}}}]
        by_status = collection.aggregate(status_query)

        batch_query = [{'$match': {'deleted': {'$ne': True}}},
                       {'$match': {'status': 'submitted'}},
                       {'$group': {'_id': {'release': '$cmssw_release', 'batch': '$batch_name'},
                                   'counts': {'$sum': 1}}},
                       {'$group': {"_id": "$_id.release",
                                   "batches": {"$push": {"batch_name": "$_id.batch",
                                                         "count": "$counts"}}}}]
        by_batch = collection.aggregate(batch_query)
        by_status = list(by_status)
        by_batch = list(by_batch)
        by_batch = sorted(by_batch, key=lambda x: x['_id'], reverse=True)
        for release in by_batch:
            release['batches'] = sorted(release['batches'],
                                        key=lambda x: (x['count'], x['batch_name'].lower()),
                                        reverse=True)

        statuses = ['new', 'approved', 'submitting', 'submitted', 'done', 'archived']
        by_status = sorted(by_status, key=lambda x: statuses.index(x['_id']))
        end_time = time.time()
        self.logger.debug('Getting objects info - RelVals, time taken %.2fs',
                          end_time - start_time)
        return by_status, by_batch
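
The two pipelines above are plain MongoDB $match/$group stages. Below is a minimal standalone sketch of the status aggregation using pymongo directly; the connection URI and the 'relval'/'relvals' database and collection names are assumptions for illustration only.

# Standalone sketch of the $match/$group status aggregation used above.
# The MongoDB URI and the 'relval'/'relvals' names are assumptions.
from pymongo import MongoClient

client = MongoClient('mongodb://localhost:27017')
collection = client['relval']['relvals']
pipeline = [{'$match': {'deleted': {'$ne': True}}},
            {'$group': {'_id': '$status', 'count': {'$sum': 1}}}]
for row in collection.aggregate(pipeline):
    print(row['_id'], row['count'])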
Example #6
    def update_workflows(self, relval):
        """
        Update computing workflows from Stats2
        """
        prepid = relval.get_prepid()
        relval_db = Database('relvals')
        with self.locker.get_lock(prepid):
            relval = self.get(prepid)
            workflow_names = {w['name'] for w in relval.get('workflows')}
            stats_workflows = get_workflows_from_stats_for_prepid(prepid)
            workflow_names -= {w['RequestName'] for w in stats_workflows}
            self.logger.info('%s workflows that are not in stats: %s',
                             len(workflow_names), workflow_names)
            stats_workflows += get_workflows_from_stats(list(workflow_names))
            all_workflows = {}
            for workflow in stats_workflows:
                if not workflow or not workflow.get('RequestName'):
                    raise Exception('Could not find workflow in Stats2')

                name = workflow.get('RequestName')
                all_workflows[name] = workflow
                self.logger.info('Found workflow %s', name)

            output_datasets = self.get_output_datasets(relval, all_workflows)
            workflows = self.pick_workflows(all_workflows, output_datasets)
            relval.set('output_datasets', output_datasets)
            relval.set('workflows', workflows)
            relval_db.save(relval.get_json())

        return relval
Example #7
def dump_documents(database_auth, output_dir, database_name, collections):
    """
    Dump a list of collections to separate directories
    """
    Database.set_database_name(database_name)
    Database.set_credentials_file(database_auth)
    for collection_name in collections:
        print('Collection %s' % (collection_name))
        database = Database(collection_name)
        collection = database.collection
        doc_count = collection.count_documents({})
        print('Found %s documents' % (doc_count))
        documents = [{}]
        page = 0
        limit = 100
        collection_path = f'{output_dir}/{collection_name}'
        os.makedirs(collection_path)
        while documents:
            documents = collection.find({}).sort('_id', 1).skip(page * limit).limit(limit)
            documents = [d for d in documents]
            if not documents:
                break

            file_name = f'{collection_path}/{database_name}_{collection_name}_{page}.json'
            with open(file_name, 'w') as output_file:
                json.dump(documents, output_file)

            print('Page %s done' % (page))
            page += 1
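
The pages written by this script can be re-assembled with nothing but the standard library. A minimal sketch, assuming the same file-name pattern as above and a hypothetical dump/relvals output directory:

# Sketch of reading the dumped pages back into a single list.
# The 'dump/relvals' directory is an assumption; the file names mirror
# the pattern written by dump_documents() above.
import glob
import json

documents = []
for file_name in sorted(glob.glob('dump/relvals/*_relvals_*.json')):
    with open(file_name) as input_file:
        documents.extend(json.load(input_file))

print('Loaded %s documents' % (len(documents)))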
Example #8
    def update_status(self, request, status, timestamp=None):
        """
        Set new status to request, update history accordingly and save to database
        """
        request_db = Database(self.database_name)
        request.set('status', status)
        request.add_history('status', status, None, timestamp)
        request_db.save(request.get_json())
Example #9
    def create_relvals_for_ticket(self, ticket):
        """
        Create RelVals from given ticket. Return list of relval prepids
        """
        ticket_db = Database('tickets')
        ticket_prepid = ticket.get_prepid()
        ssh_executor = SSHExecutor('lxplus.cern.ch', Config.get('credentials_file'))
        relval_controller = RelValController()
        created_relvals = []
        with self.locker.get_lock(ticket_prepid):
            ticket = self.get(ticket_prepid)
            rewrite_gt_string = ticket.get('rewrite_gt_string')
            recycle_input_of = ticket.get('recycle_input_of')
            try:
                workflows = self.generate_workflows(ticket, ssh_executor)
                # Iterate through workflows and create RelVal objects
                relvals = []
                for workflow_id, workflow_dict in workflows.items():
                    relvals.append(self.create_relval_from_workflow(ticket,
                                                                    workflow_id,
                                                                    workflow_dict))

                # Handle recycling if needed
                if recycle_input_of:
                    if rewrite_gt_string:
                        self.recycle_input_with_gt_rewrite(relvals,
                                                           rewrite_gt_string,
                                                           recycle_input_of)
                    else:
                        self.recycle_input(relvals,
                                           relval_controller,
                                           recycle_input_of)

                for relval in relvals:
                    relval = relval_controller.create(relval.get_json())
                    created_relvals.append(relval)
                    self.logger.info('Created %s', relval.get_prepid())

                created_relval_prepids = [r.get('prepid') for r in created_relvals]
                ticket.set('created_relvals', created_relval_prepids)
                ticket.set('status', 'done')
                ticket.add_history('created_relvals', created_relval_prepids, None)
                ticket_db.save(ticket.get_json())
            except Exception as ex:
                self.logger.error('Error creating RelVal from ticket: %s', ex)
                # Delete created relvals if there was an Exception
                for created_relval in reversed(created_relvals):
                    relval_controller.delete({'prepid': created_relval.get('prepid')})

                # And reraise the exception
                raise ex
            finally:
                # Close all SSH connections
                ssh_executor.close_connections()

        return [r.get('prepid') for r in created_relvals]
Example #10
    def update_status(self, relval, status, timestamp=None):
        """
        Set new status to RelVal, update history accordingly and save to database
        """
        relval_db = Database(self.database_name)
        relval.set('status', status)
        relval.add_history('status', status, None, timestamp)
        relval_db.save(relval.get_json())
        self.logger.info('Set "%s" status to "%s"', relval.get_prepid(),
                         status)
Example #11
    def get_tickets(self):
        """
        Return summary of tickets by status
        """
        collection = Database('tickets').collection
        by_status = collection.aggregate([{'$match': {'deleted': {'$ne': True}}},
                                          {'$group': {'_id': '$status',
                                                      'count': {'$sum': 1}}}])

        statuses = ['new', 'done']
        by_status = sorted(list(by_status), key=lambda x: statuses.index(x['_id']))
        self.logger.debug('Tickets - by status %s', len(by_status))
        return by_status
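
The fixed ordering comes from statuses.index, so the aggregation output is arranged by ticket stage rather than alphabetically. A tiny sketch of that sort on made-up aggregation rows:

# Sketch of ordering aggregation rows by a fixed status list (made-up data).
statuses = ['new', 'done']
by_status = [{'_id': 'done', 'count': 3}, {'_id': 'new', 'count': 7}]
by_status = sorted(by_status, key=lambda x: statuses.index(x['_id']))
print(by_status)  # [{'_id': 'new', 'count': 7}, {'_id': 'done', 'count': 3}]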
Example #12
    def option_reset(self, request):
        """
        Fetch and overwrite values from subcampaign
        """
        prepid = request.get_prepid()
        request_db = Database('requests')
        with self.locker.get_nonblocking_lock(prepid):
            request_json = request_db.get(prepid)
            request = Request(json_input=request_json)
            if request.get('status') != 'new':
                raise Exception('It is not allowed to option reset '
                                'requests that are not in status "new"')

            subcampaign_db = Database('subcampaigns')
            subcampaign_name = request.get('subcampaign')
            subcampaign_json = subcampaign_db.get(subcampaign_name)
            if not subcampaign_json:
                raise Exception(
                    f'Subcampaign "{subcampaign_name}" does not exist')

            subcampaign = Subcampaign(json_input=subcampaign_json)
            request.set('memory', subcampaign.get('memory'))
            request.set('sequences', subcampaign.get('sequences'))
            request.set('energy', subcampaign.get('energy'))
            request.set('cmssw_release', subcampaign.get('cmssw_release'))
            request_db.save(request.get_json())

        return request
Example #13
    def __check_for_submission(self, request):
        """
        Perform one last check of values before submitting a request
        """
        prepid = request.get_prepid()
        self.logger.debug('Final check before submission for %s', prepid)
        if request.get('status') != 'submitting':
            raise Exception(
                f'Cannot submit a request with status {request.get("status")}')

        if not request.get('input')['dataset']:
            request_db = Database('requests')
            request.set('status', 'approved')
            request_db.save(request.get_json())
            raise Exception('Cannot submit a request without input dataset')
Example #14
    def check_for_create(self, obj):
        subcampaign_database = Database('subcampaigns')
        subcampaign_names = [x['subcampaign'] for x in obj.get('steps')]
        for subcampaign_name in subcampaign_names:
            if not subcampaign_database.document_exists(subcampaign_name):
                raise Exception('Subcampaign %s does not exist' %
                                (subcampaign_name))

        dataset_blacklist = set(Settings().get('dataset_blacklist'))
        for input_dataset in obj.get('input_datasets'):
            dataset = input_dataset.split('/')[1]
            if dataset in dataset_blacklist:
                raise Exception(f'Input dataset {input_dataset} is not '
                                f'allowed because {dataset} is in blacklist')

        return True
Example #15
    def create(self, json_data):
        cmssw_release = json_data.get('cmssw_release')
        batch_name = json_data.get('batch_name')
        # Use workflow name for prepid if possible, if not - first step name
        if json_data.get('workflow_name'):
            workflow_name = json_data['workflow_name']
        else:
            first_step = RelValStep(json_input=json_data.get('steps')[0])
            workflow_name = first_step.get_short_name()
            json_data['workflow_name'] = workflow_name

        prepid_part = f'{cmssw_release}__{batch_name}-{workflow_name}'.strip(
            '-_')
        json_data['prepid'] = f'{prepid_part}-00000'
        relval_db = Database('relvals')
        with self.locker.get_lock(f'generate-relval-prepid-{prepid_part}'):
            # Get a new serial number
            serial_number = self.get_highest_serial_number(
                relval_db, f'{prepid_part}-*')
            serial_number += 1
            # Form a new temporary prepid
            prepid = f'{prepid_part}-{serial_number:05d}'
            json_data['prepid'] = prepid
            relval = super().create(json_data)

        return relval
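
The prepid is built from a shared prefix plus a zero-padded serial number, and the lock guarantees that two concurrent creations cannot pick the same number. A sketch of just the formatting, with made-up values:

# Sketch of the prepid formatting used above; the values are made up.
cmssw_release = 'CMSSW_12_0_0'
batch_name = 'test'
workflow_name = 'SomeWorkflow'
prepid_part = f'{cmssw_release}__{batch_name}-{workflow_name}'.strip('-_')
for serial_number in (1, 2, 3):
    print(f'{prepid_part}-{serial_number:05d}')
# CMSSW_12_0_0__test-SomeWorkflow-00001, ...-00002, ...-00003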
Example #16
    def get(self):
        """
        Return a list of prepid suggestions for given query
        """
        args = flask.request.args.to_dict()
        if args is None:
            args = {}

        db_name = args.pop('db_name', None)
        query = args.pop('query', None)
        limit = max(1, min(50, int(args.pop('limit', 20))))

        if not db_name or not query:
            raise Exception('Bad db_name or query parameter')

        query = query.replace(' ', '.*')

        database = Database(db_name)
        db_query = {'prepid': re.compile(f'.*{query}.*', re.IGNORECASE)}
        results = database.collection.find(db_query).limit(limit)
        results = [x['prepid'] for x in results]

        return self.output_text({
            'response': results,
            'success': True,
            'message': ''
        })
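
The suggestion matching itself is an ordinary case-insensitive "contains" regex, with spaces in the query turned into wildcards. A sketch against made-up prepids:

# Sketch of the case-insensitive regex matching built above (made-up prepids).
import re

query = 'cmssw 12'.replace(' ', '.*')
pattern = re.compile(f'.*{query}.*', re.IGNORECASE)
prepids = ['CMSSW_12_0_0__test-00001', 'CMSSW_11_3_0__other-00002']
print([p for p in prepids if pattern.match(p)])  # ['CMSSW_12_0_0__test-00001']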
Example #17
def move_to_done(database_auth, port):
    """
    Try to move all submitted RelVals to next status
    """
    Database.set_database_name('relval')
    Database.set_credentials_file(database_auth)

    connection = http.client.HTTPConnection('localhost',
                                            port=port,
                                            timeout=300)
    headers = {
        'Content-Type': 'application/json',
        'Adfs-Login': '******',
        'Adfs-Group': 'cms-pdmv-serv'
    }
    relval_db = Database('relvals')
    relvals = [{}]
    page = 0
    while relvals:
        relvals = relval_db.query(query_string='status=submitted', page=page)
        page += 1
        for relval in relvals:
            print(relval['prepid'])
            connection.request('POST',
                               '/api/relvals/next_status',
                               json.dumps(relval),
                               headers=headers)
            response = connection.getresponse()
            response_text = json.loads(response.read())['message']
            print('  %s %s' % (response.status, response_text))
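
Each RelVal is pushed through the REST API one at a time. The sketch below shows the same http.client POST for a single document; the host, port and headers are assumptions carried over from the script above, and the prepid is made up.

# Sketch of a single next_status POST with http.client; host, port and the
# Adfs-* headers are assumptions taken from move_to_done() above.
import http.client
import json

connection = http.client.HTTPConnection('localhost', port=8000, timeout=300)
headers = {'Content-Type': 'application/json',
           'Adfs-Login': '******',
           'Adfs-Group': 'cms-pdmv-serv'}
relval = {'prepid': 'CMSSW_12_0_0__test-SomeWorkflow-00001'}  # made-up prepid
connection.request('POST', '/api/relvals/next_status',
                   json.dumps(relval), headers=headers)
response = connection.getresponse()
print(response.status, json.loads(response.read()).get('message'))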
Example #18
    def check_for_delete(self, obj):
        if obj.get('status') != 'new':
            raise Exception(
                'Request must be in status "new" before it is deleted')

        requests_db = Database('requests')
        prepid = obj.get_prepid()
        subsequent_requests_query = f'input.request={prepid}'
        subsequent_requests = requests_db.query(subsequent_requests_query)
        if subsequent_requests:
            subsequent_requests_prepids = ', '.join(
                [r['prepid'] for r in subsequent_requests])
            raise Exception(
                f'Request cannot be deleted because it is an input request '
                f'for {subsequent_requests_prepids}. Delete these requests first'
            )

        return True
Example #19
    def __handle_error(self, request, error_message):
        """
        Handle error that occurred during submission, modify request accordingly
        """
        request_db = Database('requests')
        request.set('status', 'new')
        request.add_history('submission', 'failed', 'automatic')
        request_db.save(request.get_json())
        service_url = Config.get('service_url')
        emailer = Emailer()
        prepid = request.get_prepid()
        self.logger.warning('Submission of %s failed', prepid)
        subject = f'Request {prepid} submission failed'
        body = f'Hello,\n\nUnfortunately submission of {prepid} failed.\n'
        body += (f'You can find this request at '
                 f'{service_url}/requests?prepid={prepid}\n')
        body += f'Error message:\n\n{error_message}'
        recipients = emailer.get_recipients(request)
        emailer.send(subject, body, recipients)
Example #20
    def change_request_priority(self, request, priority):
        """
        Change request priority
        """
        prepid = request.get_prepid()
        request_db = Database('requests')
        cmsweb_url = Config.get('cmsweb_url')
        grid_cert = Config.get('grid_user_cert')
        grid_key = Config.get('grid_user_key')
        self.logger.info('Will try to change %s priority to %s', prepid,
                         priority)
        with self.locker.get_nonblocking_lock(prepid):
            request_json = request_db.get(prepid)
            request = Request(json_input=request_json)
            if request.get('status') != 'submitted':
                raise Exception('It is not allowed to change priority of '
                                'requests that are not in status "submitted"')

            request.set('priority', priority)
            updated_workflows = []
            active_workflows = self.__pick_active_workflows(request)
            connection = ConnectionWrapper(host=cmsweb_url,
                                           keep_open=True,
                                           cert_file=grid_cert,
                                           key_file=grid_key)
            for workflow in active_workflows:
                workflow_name = workflow['name']
                self.logger.info('Changing "%s" priority to %s', workflow_name,
                                 priority)
                response = connection.api(
                    'PUT', f'/reqmgr2/data/request/{workflow_name}',
                    {'RequestPriority': priority})
                updated_workflows.append(workflow_name)
                self.logger.debug(response)

            connection.close()
            # Update priority in Stats2
            self.force_stats_to_refresh(updated_workflows)
            # Finally save the request
            request_db.save(request.get_json())

        return request
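
At the HTTP level the ConnectionWrapper call above amounts to a certificate-authenticated PUT to ReqMgr2. A rough standard-library sketch follows; the ssl context setup, certificate paths, workflow name and priority value are assumptions for illustration.

# Rough sketch of the priority PUT using only the standard library.
# The ssl setup and certificate paths are assumptions about what
# ConnectionWrapper does internally; workflow name and priority are made up.
import http.client
import json
import ssl

context = ssl.create_default_context()
context.load_cert_chain('/path/to/usercert.pem', '/path/to/userkey.pem')
connection = http.client.HTTPSConnection('cmsweb.cern.ch', context=context)
workflow_name = 'example_workflow'
connection.request('PUT',
                   f'/reqmgr2/data/request/{workflow_name}',
                   json.dumps({'RequestPriority': 110000}),
                   headers={'Content-Type': 'application/json'})
print(connection.getresponse().status)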
Example #21
    def get_workflows_list(self, ticket):
        """
        Get a list of workflow names of created RelVals for RelMon Service
        """
        relvals_db = Database('relvals')
        created_relvals = ticket.get('created_relvals')
        created_relvals_prepids = ','.join(created_relvals)
        query = f'prepid={created_relvals_prepids}'
        results, _ = relvals_db.query_with_total_rows(query, limit=len(created_relvals))
        workflows = []
        for relval in results:
            if not relval['workflows']:
                continue

            workflows.append(relval['workflows'][-1]['name'])

        if not workflows:
            workflows.append('# No workflow names')

        self.logger.debug('Workflow names for %s:\n%s', ticket.get_prepid(), '\n'.join(workflows))
        return workflows
Example #22
    def get_requests(self):
        """
        Return summary of requests by status and submitted requests by processing string
        """
        collection = Database('requests').collection
        by_status = collection.aggregate([{'$match': {'deleted': {'$ne': True}}},
                                          {'$group': {'_id': '$status',
                                                      'count': {'$sum': 1}}}])

        by_processing_string = collection.aggregate([{'$match': {'deleted': {'$ne': True}}},
                                                     {'$match': {'status': 'submitted'}},
                                                     {'$group': {'_id': '$processing_string',
                                                                 'count': {'$sum': 1}}},
                                                     {'$sort': {'count': -1}}])
        statuses = ['new', 'approved', 'submitting', 'submitted', 'done']
        by_status = sorted(list(by_status), key=lambda x: statuses.index(x['_id']))
        by_processing_string = sorted(list(by_processing_string),
                                      key=lambda x: (x['count'], x['_id'].lower()),
                                      reverse=True)
        self.logger.debug('Requests - by status %s, by PS %s',
                          len(by_status),
                          len(by_processing_string))
        return by_status, by_processing_string
Example #23
    def __handle_error(self, relval, error_message):
        """
        Handle error that occurred during submission, modify RelVal accordingly
        """
        self.logger.error(error_message)
        relval_db = Database('relvals')
        relval.set('status', 'new')
        relval.set('campaign_timestamp', 0)
        relval.add_history('submission', 'failed', 'automatic')
        for step in relval.get('steps'):
            step.set('config_id', '')
            step.set('resolved_globaltag', '')

        relval_db.save(relval.get_json())
        service_url = Config.get('service_url')
        emailer = Emailer()
        prepid = relval.get_prepid()
        subject = f'RelVal {prepid} submission failed'
        body = f'Hello,\n\nUnfortunately submission of {prepid} failed.\n'
        body += (f'You can find this relval at '
                 f'{service_url}/relvals?prepid={prepid}\n')
        body += f'Error message:\n\n{error_message}'
        recipients = emailer.get_recipients(relval)
        emailer.send(subject, body, recipients)
Example #24
    def create(self, json_data):
        # Clean up the input
        cmssw_release = json_data.get('cmssw_release')
        batch_name = json_data.get('batch_name')
        prepid_part = f'{cmssw_release}__{batch_name}'
        ticket_db = Database('tickets')
        json_data['prepid'] = f'{prepid_part}-00000'
        with self.locker.get_lock(f'generate-ticket-prepid-{prepid_part}'):
            # Get a new serial number
            serial_number = self.get_highest_serial_number(ticket_db,
                                                           f'{prepid_part}-*')
            serial_number += 1
            # Form a new temporary prepid
            prepid = f'{prepid_part}-{serial_number:05d}'
            json_data['prepid'] = prepid
            ticket = super().create(json_data)

        return ticket
Example #25
    def get(self):
        """
        Perform a search
        """
        args = flask.request.args.to_dict()
        if args is None:
            args = {}

        db_name = args.pop('db_name', None)
        page = int(args.pop('page', 0))
        limit = int(args.pop('limit', 20))
        sort = args.pop('sort', None)
        sort_asc = args.pop('sort_asc', 'true').lower() == 'true'

        # Special cases
        from_ticket = args.pop('ticket', None)
        if db_name == 'requests' and from_ticket:
            ticket_database = Database('tickets')
            ticket = ticket_database.get(from_ticket)
            created_requests = ','.join(ticket['created_requests'])
            prepid_query = args.pop('prepid', '')
            args['prepid'] = ('%s,%s' %
                              (prepid_query, created_requests)).strip(',')

        limit = max(1, min(limit, 500))
        query_string = '&&'.join(['%s=%s' % (pair) for pair in args.items()])
        database = Database(db_name)
        query_string = database.build_query_with_types(query_string,
                                                       self.classes[db_name])
        results, total_rows = database.query_with_total_rows(
            query_string=query_string,
            page=page,
            limit=limit,
            sort_attr=sort,
            sort_asc=sort_asc,
            ignore_case=True)

        return self.output_text({
            'response': {
                'results': results,
                'total_rows': total_rows
            },
            'success': True,
            'message': ''
        })
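
All remaining URL arguments are folded into the internal '&&'-separated query string before it is typed and executed. A tiny sketch of that folding, on made-up arguments:

# Sketch of folding leftover URL arguments into the internal query string
# (made-up arguments).
args = {'status': 'submitted', 'cmssw_release': 'CMSSW_12_0_0'}
query_string = '&&'.join(['%s=%s' % (pair) for pair in args.items()])
print(query_string)  # status=submitted&&cmssw_release=CMSSW_12_0_0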
Example #26
    def create(self, json_data):
        # Clean up the input
        ticket_db = Database(self.database_name)
        json_data['prepid'] = 'Temp00001'
        ticket = Ticket(json_input=json_data)
        # Use first subcampaign name for prepid
        subcampaign_name = ticket.get('steps')[0]['subcampaign']
        processing_string = ticket.get('steps')[0]['processing_string']
        prepid_middle_part = f'{subcampaign_name}-{processing_string}'
        with self.locker.get_lock(
                f'create-subcampaign-ticket-prepid-{prepid_middle_part}'):
            # Get a new serial number
            serial_number = self.get_highest_serial_number(
                ticket_db, f'{prepid_middle_part}-*')
            serial_number += 1
            # Form a new temporary prepid
            prepid = f'{prepid_middle_part}-{serial_number:05d}'
            json_data['prepid'] = prepid
            new_ticket_json = super().create(json_data)

        return new_ticket_json
Example #27
    def after_delete(self, obj):
        prepid = obj.get_prepid()
        tickets_db = Database('tickets')
        tickets = tickets_db.query(f'created_relvals={prepid}')
        self.logger.debug(json.dumps(tickets, indent=2))
        for ticket_json in tickets:
            ticket_prepid = ticket_json['prepid']
            with self.locker.get_lock(ticket_prepid):
                ticket_json = tickets_db.get(ticket_prepid)
                ticket = Ticket(json_input=ticket_json)
                created_relvals = ticket.get('created_relvals')
                if prepid in created_relvals:
                    created_relvals.remove(prepid)

                ticket.set('created_relvals', created_relvals)
                ticket.add_history('remove_relval', prepid, None)
                tickets_db.save(ticket.get_json())
Example #28
"""
Script to add run list to relval steps
"""
import sys
import os.path
import os
sys.path.append(os.path.abspath(os.path.pardir))
from core_lib.database.database import Database

Database.set_credentials_file(os.getenv('DB_AUTH'))
Database.set_database_name('relval')

relvals_database = Database('relvals')

total_relvals = relvals_database.get_count()

print('Total relvals: %s' % (total_relvals))

for index, item in enumerate(relvals_database.query(limit=total_relvals)):
    print('Processing entry %s/%s %s' %
          (index + 1, total_relvals, item.get('prepid', '<no-id>')))
    for step in item['steps']:
        step['input']['run'] = step['input'].get('run', [])

    relvals_database.save(item)

print('Done')
Example #29
"""
Script to update data format in database for PhaseII update
"""
import sys
import os.path
import os
sys.path.append(os.path.abspath(os.path.pardir))
from core_lib.database.database import Database

Database.set_credentials_file(os.getenv('DB_AUTH'))
Database.set_database_name('rereco')

request_db = Database('requests')
subcampaign_db = Database('subcampaigns')
old_ticket_db = Database('subcampaign_tickets')
new_ticket_db = Database('tickets')

total_subcampaigns = subcampaign_db.get_count()
total_requests = request_db.get_count()
total_old_tickets = old_ticket_db.get_count()
total_new_tickets = new_ticket_db.get_count()

print('Requests: %s' % (total_requests))
print('Subcampaigns: %s' % (total_subcampaigns))
print('(Old) subcampaign tickets: %s' % (total_old_tickets))
print('(New) tickets: %s' % (total_new_tickets))

for index, subcampaign in enumerate(
        subcampaign_db.query(limit=total_subcampaigns)):
    print('Processing subcampaign %s/%s %s' %
          (index + 1, total_subcampaigns, subcampaign['prepid']))
Example #30
    def get(self):
        """
        Perform a search
        """
        args = flask.request.args.to_dict()
        if args is None:
            args = {}

        db_name = args.pop('db_name', None)
        page = int(args.pop('page', 0))
        limit = int(args.pop('limit', 20))
        sort = args.pop('sort', None)
        sort_asc = args.pop('sort_asc', None)

        # Special cases
        from_ticket = args.pop('ticket', None)
        if db_name == 'relvals' and from_ticket:
            ticket_database = Database('tickets')
            tickets = ticket_database.query(
                query_string=f'prepid={from_ticket}',
                limit=100,
                ignore_case=True)
            created_relvals = []
            for ticket in tickets:
                created_relvals.extend(ticket['created_relvals'])

            created_relvals = ','.join(created_relvals)
            prepid_query = args.pop('prepid', '')
            args['prepid'] = ('%s,%s' %
                              (prepid_query, created_relvals)).strip(',')

        # Sorting logic: by default sort descending by creation time
        if sort is None:
            sort = 'created_on'

        if sort == 'created_on' and sort_asc is None:
            sort_asc = False

        if sort_asc is None:
            sort_asc = True

        limit = max(1, min(limit, 500))
        sort_asc = str(sort_asc).lower() == 'true'
        query_string = '&&'.join(['%s=%s' % (pair) for pair in args.items()])
        database = Database(db_name)
        query_string = database.build_query_with_types(query_string,
                                                       self.classes[db_name])
        results, total_rows = database.query_with_total_rows(
            query_string=query_string,
            page=page,
            limit=limit,
            sort_attr=sort,
            sort_asc=sort_asc,
            ignore_case=True)

        return self.output_text({
            'response': {
                'results': results,
                'total_rows': total_rows
            },
            'success': True,
            'message': ''
        })