Example #1
0
    def generate_input_json(self, processing):
        """Write the HPO input json file for *processing* and return its name.

        Loads every point recorded in the first output collection (each
        content's ``path`` holds a json-encoded point), bundles them with
        ``self.opt_space`` and dumps the result into the processing's working
        directory.

        :returns: the input json file name (relative to the working dir).
        :raises Exception: if any step fails; the original error is chained.
        """
        try:
            from idds.core import (catalog as core_catalog)

            output_collection = self.get_output_collections()[0]
            contents = core_catalog.get_contents_by_coll_id_status(
                coll_id=output_collection.coll_id)
            # Each content's 'path' column stores one json-encoded point.
            points = [json.loads(content['path']) for content in contents]

            job_dir = self.get_working_dir(processing)
            # The agent configuration may override the default file name.
            if 'input_json' in self.agent_attributes and self.agent_attributes[
                    'input_json']:
                input_json = self.agent_attributes['input_json']
            else:
                input_json = 'idds_input.json'
            opt_points = {'points': points, 'opt_space': self.opt_space}
            with open(os.path.join(job_dir, input_json), 'w') as f:
                json.dump(opt_points, f)
            return input_json
        except Exception as e:
            # Chain the original exception so the root-cause traceback is
            # preserved instead of being flattened into a string only.
            raise Exception("Failed to generate idds inputs for HPO: %s" %
                            str(e)) from e
Example #2
0
    def generate_new_contents(self, transform, input_collection,
                              output_collection, points):
        """Build new output-content records for the given HPO points.

        A single point is accepted as well as a tuple/list of points.  Names
        continue the numbering after the contents already present in the
        output collection.
        """
        if not isinstance(points, (tuple, list)):
            points = [points]
        existing = core_catalog.get_contents_by_coll_id_status(
            coll_id=output_collection['coll_id'])

        new_contents = []
        # Start numbering where the existing contents leave off.
        for seq, point in enumerate(points, start=len(existing)):
            metadata = {
                'input_collection_id': input_collection['coll_id']
            }
            new_contents.append({
                'coll_id': output_collection['coll_id'],
                # 'scope': output_collection['scope'],
                'scope': 'hpo',
                'name': str(seq),
                'min_id': 0,
                'max_id': 0,
                # The point is serialized together with a None result slot.
                'path': json.dumps((point, None)),
                'status': ContentStatus.New,
                'content_type': ContentType.PseudoContent,
                'content_metadata': metadata
            })
        return new_contents
Example #3
0
    def generate_transform_outputs(self, transform, collections):
        """Create output contents and a new processing for a transform.

        Splits *collections* into input and output, turns the pending
        (New/Failed) input contents into output contents, and prepares a new
        processing record when there is work to do or when the transform is
        being extended (in which case existing processings are cancelled).
        """
        self.logger.debug(
            "Generating transform outputs: transform: %s, collections: %s" %
            (transform, collections))

        input_collection = None
        output_collection = None
        for coll in collections:
            relation = coll['relation_type']
            if relation == CollectionRelationType.Input:
                input_collection = coll
            if relation == CollectionRelationType.Output:
                output_collection = coll

        # Only contents that still need (re)processing are picked up.
        pending = [ContentStatus.New, ContentStatus.Failed]
        contents = core_catalog.get_contents_by_coll_id_status(
            coll_id=input_collection['coll_id'], status=pending)
        output_contents = self.generate_transform_output_contents(
            transform, input_collection, output_collection, contents)

        self.logger.debug(
            "Generating transform number of output contents: %s" %
            len(output_contents))

        # An extended transform supersedes all its current processings.
        to_cancel_processing = []
        if transform['status'] == TransformStatus.Extend:
            for proc in core_processings.get_processings_by_transform_id(
                    transform['transform_id']):
                to_cancel_processing.append(proc['processing_id'])

        new_processing = None
        if output_contents or transform['status'] == TransformStatus.Extend:
            processing_metadata = {
                'transform_id': transform['transform_id'],
                'input_collection': input_collection['coll_id'],
                'output_collection': output_collection['coll_id']
            }
            # Copy every transform metadata entry into the processing.
            for key in transform['transform_metadata']:
                processing_metadata[key] = transform['transform_metadata'][key]

            new_processing = {
                'transform_id': transform['transform_id'],
                'status': ProcessingStatus.New,
                'processing_metadata': processing_metadata
            }
            self.logger.debug("Generating transform output processing: %s" %
                              new_processing)

        return {
            'transform': transform,
            'input_collection': input_collection,
            'output_collection': output_collection,
            'input_contents': contents,
            'output_contents': output_contents,
            'processing': new_processing,
            'to_cancel_processing': to_cancel_processing
        }
    def __call__(self, processing, transform, input_collection,
                 output_collection):
        """Submit a job for *processing* over the input collection's contents.

        Writes the "scope:name" file identifiers to an input json in the job
        directory, submits the job described by the transform metadata and
        returns the processing update (Submitted plus the next poll time).
        """
        try:
            contents = core_catalog.get_contents_by_coll_id_status(
                coll_id=input_collection['coll_id'])
            # "scope:name" identifier for every input content.
            files = ['%s:%s' % (c['scope'], c['name']) for c in contents]
            input_list = ','.join(files)

            job_dir = self.get_job_dir(processing['processing_id'])
            input_json = 'idds_input.json'
            with open(os.path.join(job_dir, input_json), 'w') as f:
                json.dump(files, f)

            meta = transform['transform_metadata']
            sandbox = meta['sandbox'] if 'sandbox' in meta else None
            executable = meta['executable']
            arguments = meta['arguments']
            output_json = meta['output_json'] if 'output_json' in meta else None

            job_id, outputs = self.submit_job(processing['processing_id'],
                                              sandbox, executable, arguments,
                                              input_list, input_json,
                                              output_json)

            processing_metadata = processing['processing_metadata']
            processing_metadata['job_id'] = job_id
            processing_metadata['submitter'] = self.name
            # On a failed submission the submitter output is the error text.
            processing_metadata['submit_errors'] = None if job_id else outputs

            next_poll = (datetime.datetime.utcnow() +
                         datetime.timedelta(seconds=self.poll_time_period))
            return {'processing_id': processing['processing_id'],
                    'status': ProcessingStatus.Submitted,
                    'next_poll_at': next_poll,
                    'processing_metadata': processing_metadata}
        except Exception as ex:
            self.logger.error(ex)
            self.logger.error(traceback.format_exc())
            raise exceptions.AgentPluginError(
                '%s: %s' % (str(ex), traceback.format_exc()))
Example #5
0
    def process_monitor_processing(self, processing):
        """Poll a running processing and assemble its update record.

        Fetches the collections and output contents referenced by the
        processing metadata, delegates to ``poll_processing`` and converts the
        poller result into status/metadata updates plus file messages.
        """
        transform_id = processing['transform_id']
        processing_metadata = processing['processing_metadata']
        input_collection = core_catalog.get_collection(
            coll_id=processing_metadata['input_collection'])
        output_coll_id = processing_metadata['output_collection']
        output_collection = core_catalog.get_collection(coll_id=output_coll_id)
        output_contents = core_catalog.get_contents_by_coll_id_status(
            coll_id=output_coll_id)
        transform = core_transforms.get_transform(transform_id)

        ret_poll = self.poll_processing(processing, transform,
                                        input_collection, output_collection,
                                        output_contents)
        if not ret_poll:
            # Nothing new from the poller; just release the lock.
            return {'processing_id': processing['processing_id'],
                    'locking': ProcessingLocking.Idle}

        new_files = ret_poll['new_files'] if 'new_files' in ret_poll else []
        updated_files = ret_poll['updated_files']
        file_msg = (self.generate_file_message(transform, updated_files)
                    if updated_files else [])

        updates = ret_poll['processing_updates']
        processing_parameters = {'status': updates['status'],
                                 'locking': ProcessingLocking.Idle,
                                 'processing_metadata': updates['processing_metadata']}
        if 'output_metadata' in updates:
            processing_parameters['output_metadata'] = updates['output_metadata']

        updated_processing = {'processing_id': processing['processing_id'],
                              'parameters': processing_parameters}
        return {'transform': transform,
                'processing_updates': updated_processing,
                'updated_files': updated_files,
                'new_files': new_files,
                'file_message': file_msg}
    def __call__(self, processing, transform, input_collection,
                 output_collection):
        """Submit an HPO point-generation job for *processing*.

        Collects the points already recorded in the output collection; when
        too many of them are still unevaluated the submission is postponed
        (status stays New), otherwise the points are dumped to an input json
        and the job described by the transform metadata is submitted.
        """
        try:
            contents = core_catalog.get_contents_by_coll_id_status(
                coll_id=output_collection['coll_id'])
            points = []
            unevaluated_points = 0
            for content in contents:
                points.append(content['content_metadata']['point'])
                if content['status'] != ContentStatus.Available:
                    unevaluated_points += 1

            if unevaluated_points >= self.min_unevaluated_points:
                # Too many points are still pending evaluation: do not
                # submit yet and keep the processing in the New state.
                processing_metadata = processing['processing_metadata']
                processing_metadata['unevaluated_points'] = unevaluated_points
                return {
                    'processing_id': processing['processing_id'],
                    'status': ProcessingStatus.New,
                    'processing_metadata': processing_metadata
                }

            job_dir = self.get_job_dir(processing['processing_id'])
            input_json = 'idds_input.json'
            with open(os.path.join(job_dir, input_json), 'w') as f:
                json.dump(points, f)

            sandbox = None
            if 'sandbox' in transform['transform_metadata']:
                sandbox = transform['transform_metadata']['sandbox']
            executable = transform['transform_metadata']['executable']
            arguments = transform['transform_metadata']['arguments']
            output_json = None
            if 'output_json' in transform['transform_metadata']:
                output_json = transform['transform_metadata']['output_json']

            param_values = {
                'NUM_POINTS': self.max_unevaluated_points - unevaluated_points,
                # BUG FIX: substitute the actual input file name, not the
                # literal string 'input_json' — otherwise the job command
                # references a file that was never written.
                'IN': input_json,
                'OUT': output_json
            }

            executable = replace_parameters_with_values(
                executable, param_values)
            arguments = replace_parameters_with_values(arguments, param_values)

            input_list = None
            job_id, outputs = self.submit_job(processing['processing_id'],
                                              sandbox, executable, arguments,
                                              input_list, input_json,
                                              output_json)

            processing_metadata = processing['processing_metadata']
            processing_metadata['job_id'] = job_id
            processing_metadata['submitter'] = self.name
            if not job_id:
                # Keep the submitter output as the error record.
                processing_metadata['submit_errors'] = outputs
            else:
                processing_metadata['submit_errors'] = None

            return {
                'processing_id': processing['processing_id'],
                'status': ProcessingStatus.Submitted,
                'processing_metadata': processing_metadata
            }
        except Exception as ex:
            self.logger.error(ex)
            self.logger.error(traceback.format_exc())
            raise exceptions.AgentPluginError(
                '%s: %s' % (str(ex), traceback.format_exc()))
Example #7
0
    def __call__(self, processing, transform, input_collection, output_collection):
        """Write the HPO input json and submit the point-generation job.

        Existing points (and the optional opt_space) are dumped into the job
        directory; the executable/arguments come from either the method-based
        or sandbox-based builder depending on the transform metadata.
        """
        try:
            contents = core_catalog.get_contents_by_coll_id_status(coll_id=output_collection['coll_id'])
            points = []
            unevaluated_points = 0
            for content in contents:
                # point = content['content_metadata']['point']
                points.append(json.loads(content['path']))
                if content['status'] != ContentStatus.Available:
                    unevaluated_points += 1

            job_dir = self.get_job_dir(processing['processing_id'])
            input_json = 'idds_input.json'
            meta = transform['transform_metadata']
            opt_points = {'points': points,
                          'opt_space': meta['opt_space'] if 'opt_space' in meta else None}
            with open(os.path.join(job_dir, input_json), 'w') as f:
                json.dump(opt_points, f)

            # Pick the builder for the job description.
            if 'method' in meta and meta['method']:
                builder = self.get_executable_arguments_for_method
            else:
                builder = self.get_executable_arguments_for_sandbox
            (status, errors, sandbox, executable, arguments, input_json,
             output_json, should_transfer_executable) = builder(
                 meta, input_json, unevaluated_points)

            processing_metadata = processing['processing_metadata']
            processing_metadata['submitter'] = self.name
            processing_metadata['output_json'] = output_json
            processing_metadata['max_points'] = self.get_max_points(meta)
            # processing_metadata['job_dir'] = job_dir

            if status != 0:
                # Could not build the job description; record the errors.
                processing_metadata['job_id'] = None
                processing_metadata['submit_errors'] = errors
                ret = {'processing_id': processing['processing_id'],
                       'status': ProcessingStatus.Submitted,
                       'processing_metadata': processing_metadata}
            else:
                input_list = None
                job_id, outputs = self.submit_job(
                    processing['processing_id'], sandbox, executable,
                    arguments, input_list, input_json, output_json,
                    should_transfer_executable)
                processing_metadata['job_id'] = job_id
                processing_metadata['submit_errors'] = None if job_id else outputs
                next_poll = datetime.datetime.utcnow() + datetime.timedelta(seconds=self.poll_time_period)
                ret = {'processing_id': processing['processing_id'],
                       'status': ProcessingStatus.Submitted,
                       'next_poll_at': next_poll,
                       'processing_metadata': processing_metadata}
            return ret
        except Exception as ex:
            self.logger.error(ex)
            self.logger.error(traceback.format_exc())
            raise exceptions.AgentPluginError('%s: %s' % (str(ex), traceback.format_exc()))
Example #8
0
    def __call__(self, processing, transform, input_collection,
                 output_collection):
        """Write the HPO input json and submit the point-generation job.

        Existing points (and the optional opt_space) are dumped into the job
        directory; the executable/arguments come from either the method-based
        or sandbox-based builder depending on the transform metadata.
        """
        try:
            contents = core_catalog.get_contents_by_coll_id_status(
                coll_id=output_collection['coll_id'])
            points = []
            unevaluated_points = 0
            for content in contents:
                # point = content['content_metadata']['point']
                points.append(json.loads(content['path']))
                if content['status'] != ContentStatus.Available:
                    unevaluated_points += 1
            """
            if self.min_unevaluated_points and unevaluated_points >= self.min_unevaluated_points:
                # not submit the job
                processing_metadata = processing['processing_metadata']
                processing_metadata['unevaluated_points'] = unevaluated_points
                processing_metadata['not_submit'] = 'unevaluated_points(%s) > min_unevaluated_points(%s)' % (unevaluated_points, self.min_unevaluated_points)
                self.logger.info("processing_id(%s) not submit currently because unevaluated_points(%s) >= min_unevaluated_points(%s)" % (processing['processing_id'], unevaluated_points, self.min_unevaluated_points))
                ret = {'processing_id': processing['processing_id'],
                       'status': ProcessingStatus.New,
                       'processing_metadata': processing_metadata}
                return ret

            if 'not_submit' in processing_metadata:
                del processing_metadata['not_submit']
            """

            job_dir = self.get_job_dir(processing['processing_id'])
            input_json = 'idds_input.json'
            meta = transform['transform_metadata']
            opt_points = {'points': points,
                          'opt_space': meta['opt_space'] if 'opt_space' in meta else None}
            with open(os.path.join(job_dir, input_json), 'w') as f:
                json.dump(opt_points, f)

            # Pick the builder for the job description.
            if 'method' in meta and meta['method']:
                builder = self.get_executable_arguments_for_method
            else:
                builder = self.get_executable_arguments_for_sandbox
            (status, errors, sandbox, executable, arguments, input_json,
             output_json, should_transfer_executable) = builder(
                 meta, input_json, unevaluated_points)

            processing_metadata = processing['processing_metadata']
            processing_metadata['submitter'] = self.name
            processing_metadata['output_json'] = output_json

            if status != 0:
                # Could not build the job description; record the errors.
                processing_metadata['job_id'] = None
                processing_metadata['submit_errors'] = errors
            else:
                input_list = None
                job_id, outputs = self.submit_job(
                    processing['processing_id'], sandbox, executable,
                    arguments, input_list, input_json, output_json,
                    should_transfer_executable)
                processing_metadata['job_id'] = job_id
                processing_metadata['submit_errors'] = None if job_id else outputs

            return {
                'processing_id': processing['processing_id'],
                'status': ProcessingStatus.Submitted,
                'processing_metadata': processing_metadata
            }
        except Exception as ex:
            self.logger.error(ex)
            self.logger.error(traceback.format_exc())
            raise exceptions.AgentPluginError(
                '%s: %s' % (str(ex), traceback.format_exc()))