Code Example #1
File: job_controller.py Project: xthzhjwzyc/FATE
def save_pipeline(job_id, role, party_id, model_id, model_version):
    schedule_logger(job_id).info(
        'job {} on {} {} start to save pipeline'.format(
            job_id, role, party_id))
    job_dsl, job_runtime_conf, train_runtime_conf = job_utils.get_job_configuration(
        job_id=job_id, role=role, party_id=party_id)
    job_parameters = job_runtime_conf.get('job_parameters', {})
    job_type = job_parameters.get('job_type', '')
    if job_type == 'predict':
        return
    dag = job_utils.get_job_dsl_parser(
        dsl=job_dsl,
        runtime_conf=job_runtime_conf,
        train_runtime_conf=train_runtime_conf)
    predict_dsl = dag.get_predict_dsl(role=role)
    pipeline = pipeline_pb2.Pipeline()
    pipeline.inference_dsl = json_dumps(predict_dsl, byte=True)
    pipeline.train_dsl = json_dumps(job_dsl, byte=True)
    pipeline.train_runtime_conf = json_dumps(job_runtime_conf, byte=True)
    pipeline.fate_version = RuntimeConfig.get_env("FATE")
    pipeline.model_id = model_id
    pipeline.model_version = model_version
    job_tracker = Tracking(job_id=job_id,
                           role=role,
                           party_id=party_id,
                           model_id=model_id,
                           model_version=model_version)
    job_tracker.save_pipeline(pipelined_buffer_object=pipeline)
    schedule_logger(job_id).info(
        'job {} on {} {} save pipeline successfully'.format(
            job_id, role, party_id))
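Every example on this page routes serialization through a shared json_dumps helper rather than calling json.dumps directly. Judging from the call sites (a byte=True flag wherever the result fills a protobuf bytes field), the wrapper plausibly looks like the sketch below; the actual helper lives in arch.api.utils.core_utils and may differ.

import json

def json_dumps(src, byte=False):
    # Minimal sketch inferred from the call sites on this page: serialize
    # to a JSON string, optionally encoding to UTF-8 bytes so the result
    # can fill protobuf bytes fields such as pipeline.inference_dsl.
    # Not the verified FATE implementation.
    dest = json.dumps(src)
    return dest.encode('utf-8') if byte else dest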
Code Example #2
def save_commit_tmp(commit_id, data_table_namespace, tag, branch):
    version_tmp_table = get_commit_tmp_table(
        data_table_namespace=data_table_namespace)
    version_tmp_table.put(commit_id,
                          json_dumps({
                              "tag": tag,
                              "branch": branch
                          }),
                          use_serialize=False)
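A hypothetical invocation of the function above; the commit id, namespace, tag, and branch values are illustrative, not taken from the project:

# Hypothetical usage: stage commit metadata under a temporary table
# before the commit is finalized.
save_commit_tmp(commit_id='a1b2c3d4',
                data_table_namespace='fate_flow_test',
                tag='v1',
                branch='master')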
Code Example #3
File: job_utils.py Project: xthzhjwzyc/FedRec
def update_job_progress(job_id, dag, current_task_id):
    role, party_id = query_job_info(job_id)
    component_count = len(
        dag.get_dependency(role=role,
                           party_id=int(party_id))['component_list'])
    success_count = success_task_count(job_id=job_id)
    job = Job()
    job.f_progress = float(success_count) / component_count * 100
    job.f_update_time = current_timestamp()
    job.f_current_tasks = json_dumps([current_task_id])
    return job
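A sketch of how this might be driven from a scheduler, assuming dag was built with job_utils.get_job_dsl_parser as in Example #1 and that Job is a peewee-style model whose save() persists the row (an assumption; persistence is not shown above):

# Hypothetical usage inside a scheduling loop.
dag = job_utils.get_job_dsl_parser(dsl=job_dsl,
                                   runtime_conf=job_runtime_conf,
                                   train_runtime_conf=train_runtime_conf)
job = update_job_progress(job_id='20200101000000001', dag=dag,
                          current_task_id='task_0')
job.save()  # assumed persistence API, not shown in the example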
Code Example #4
def save_data_table_meta(kv, data_table_name, data_table_namespace):
    """
    save data table meta information
    :param kv: dict of metadata; each value must be JSON-serializable
    :param data_table_name: table name of this data table
    :param data_table_namespace: namespace of this data table
    :return:
    """
    from arch.api.utils.core_utils import json_dumps
    data_meta_table = FateSession.get_instance().table(
        name="%s.meta" % data_table_name,
        namespace=data_table_namespace,
        partition=1,
        create_if_missing=True,
        error_if_exist=False,
        persistent=True,
        in_place_computing=False)
    for k, v in kv.items():
        data_meta_table.put(k, json_dumps(v), use_serialize=False)
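A hypothetical call; the metadata keys and table names below are illustrative only:

# Hypothetical usage: persist schema metadata alongside a data table.
save_data_table_meta(
    kv={'schema': {'header': ['x0', 'x1', 'x2'], 'sid': 'id'},
        'count': 10000},
    data_table_name='breast_hetero_guest',
    data_table_namespace='experiment')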
Code Example #5
def save_version_info(commit_id, data_table_namespace, version_log, tag,
                      branch):
    version_table = get_version_table(
        data_table_namespace=data_table_namespace)
    parent = get_branch_current_commit(version_table=version_table,
                                       branch_name=branch)
    version_info = dict()
    version_info["commitId"] = commit_id
    if parent != commit_id:
        version_info["parent"] = parent
    else:
        version_info.update(
            get_version_info(version_table=version_table, commit_id=parent))
        version_info["repeatCommit"] = True
    version_info["name"] = commit_id
    version_info["namespace"] = data_table_namespace
    version_info["log"] = version_log
    version_info["tag"] = tag
    version_table.put(commit_id, json_dumps(version_info), use_serialize=False)
    # TODO: should use a lock
    version_table.put(branch, commit_id, use_serialize=False)
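The TODO above flags that writing the version record and advancing the branch pointer are two separate puts with no atomicity. One minimal way to serialize them, sketched with a process-local threading.Lock (a multi-node deployment would need a distributed lock, which this sketch does not provide):

import threading

# Hypothetical process-local guard; not part of the project.
_version_write_lock = threading.Lock()

def save_version_info_locked(commit_id, data_table_namespace, version_log,
                             tag, branch):
    with _version_write_lock:
        save_version_info(commit_id, data_table_namespace, version_log,
                          tag, branch)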
Code Example #6
    def submit_job(job_data, job_id=None):
        if not job_id:
            job_id = generate_job_id()
        schedule_logger(job_id).info('submit job, job_id {}, body {}'.format(job_id, job_data))
        job_dsl = job_data.get('job_dsl', {})
        job_runtime_conf = job_data.get('job_runtime_conf', {})
        job_utils.check_pipeline_job_runtime_conf(job_runtime_conf)
        job_parameters = job_runtime_conf['job_parameters']
        job_initiator = job_runtime_conf['initiator']
        job_type = job_parameters.get('job_type', '')
        if job_type != 'predict':
            # generate job model info
            job_parameters['model_id'] = '#'.join([dtable_utils.all_party_key(job_runtime_conf['role']), 'model'])
            job_parameters['model_version'] = job_id
            train_runtime_conf = {}
        else:
            detect_utils.check_config(job_parameters, ['model_id', 'model_version'])
            # get inference dsl from pipeline model as job dsl
            job_tracker = Tracking(job_id=job_id, role=job_initiator['role'], party_id=job_initiator['party_id'],
                                   model_id=job_parameters['model_id'], model_version=job_parameters['model_version'])
            pipeline_model = job_tracker.get_output_model('pipeline')
            job_dsl = json_loads(pipeline_model['Pipeline'].inference_dsl)
            train_runtime_conf = json_loads(pipeline_model['Pipeline'].train_runtime_conf)
        path_dict = save_job_conf(job_id=job_id,
                                  job_dsl=job_dsl,
                                  job_runtime_conf=job_runtime_conf,
                                  train_runtime_conf=train_runtime_conf,
                                  pipeline_dsl=None)

        job = Job()
        job.f_job_id = job_id
        job.f_roles = json_dumps(job_runtime_conf['role'])
        job.f_work_mode = job_parameters['work_mode']
        job.f_initiator_party_id = job_initiator['party_id']
        job.f_dsl = json_dumps(job_dsl)
        job.f_runtime_conf = json_dumps(job_runtime_conf)
        job.f_train_runtime_conf = json_dumps(train_runtime_conf)
        job.f_run_ip = ''
        job.f_status = JobStatus.WAITING
        job.f_progress = 0
        job.f_create_time = current_timestamp()

        initiator_role = job_initiator['role']
        initiator_party_id = job_initiator['party_id']
        if initiator_party_id not in job_runtime_conf['role'][initiator_role]:
            schedule_logger(job_id).info("initiator party id error:{}".format(initiator_party_id))
            raise Exception("initiator party id error {}".format(initiator_party_id))

        get_job_dsl_parser(dsl=job_dsl,
                           runtime_conf=job_runtime_conf,
                           train_runtime_conf=train_runtime_conf)

        TaskScheduler.distribute_job(job=job, roles=job_runtime_conf['role'], job_initiator=job_initiator)

        # push into queue
        job_event = job_utils.job_event(job_id, initiator_role, initiator_party_id)
        try:
            RuntimeConfig.JOB_QUEUE.put_event(job_event)
        except Exception as e:
            raise Exception('push job into queue failed') from e

        schedule_logger(job_id).info(
            'submit job successfully, job id is {}, model id is {}'.format(job.f_job_id, job_parameters['model_id']))
        board_url = BOARD_DASHBOARD_URL.format(job_id, job_initiator['role'], job_initiator['party_id'])
        logs_directory = get_job_log_directory(job_id)
        return job_id, path_dict['job_dsl_path'], path_dict['job_runtime_conf_path'], logs_directory, \
               {'model_id': job_parameters['model_id'], 'model_version': job_parameters['model_version']}, board_url
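The indentation shows submit_job is defined inside a class; assuming it is exposed as a static method on a controller class (called JobController here purely for illustration), a submission might look like:

# Hypothetical usage; job_dsl_dict and job_conf_dict are assumed to be
# already-loaded Python dicts.
job_id, dsl_path, conf_path, log_dir, model_info, board_url = \
    JobController.submit_job({'job_dsl': job_dsl_dict,
                              'job_runtime_conf': job_conf_dict})
print(model_info['model_id'], model_info['model_version'], board_url)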