Example no. 1
def MakeScopeSymbol(job_conf, parallel_conf, is_mirrored):
    # Build the initial scope from the job config and the parallel placement
    # (device tag plus device names), then return the scope's symbol id.
    return scope_util.MakeInitialScope(
        job_conf,
        parallel_conf.device_tag(),
        list(parallel_conf.device_name()),
        is_mirrored,
    ).symbol_id
Example no. 2
    def open(self, job_name, signature=None, batch_size=None):
        # Generator: opens a JobBuildAndInferCtx for job_name, configures the
        # job, then yields the session while the job's scope is active and
        # closes the context afterwards.
        self._check_status(self.SessionStatus.OPEN)
        c_api_util.JobBuildAndInferCtx_Open(job_name)

        if signature is not None:
            self.set_job_signature(job_name, signature)

        if isinstance(batch_size, int):
            self.set_job_batch_size(job_name, batch_size)

        job_conf = self._get_job_conf(job_name)
        c_api_util.CurJobBuildAndInferCtx_SetJobConf(job_conf)

        # Build the initial scope from the default machine/device placement.
        tag_and_dev_ids = placement_util.GetDefaultMachineDeviceIds(
            self.config_proto_.resource)
        scope = scope_util.MakeInitialScope(job_conf, *tag_and_dev_ids, None,
                                            self.is_mirrored_)

        with runtime_mode.ModeScope(runtime_mode.GLOBAL_MODE):
            with scope_util.ScopeContext(scope):
                self.cur_job_name_ = job_name
                yield self
                self.cur_job_name_ = None

        oneflow_api.JobBuildAndInferCtx_Close()
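Because open() yields the session from inside the opened job context, callers are expected to drive it as a context manager. A minimal usage sketch, assuming the method is wrapped with contextlib.contextmanager in the enclosing class (the yield suggests this); the session object and job name below are placeholders, not names from the original snippet:

# Hypothetical usage; `sess` and "predict_job" are assumed names.
with sess.open("predict_job", signature=None, batch_size=8) as opened:
    pass  # build or run the job while its scope and job context are active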
Example no. 3
def MakeScopeSymbol(job_conf_str, parallel_conf_str, is_mirrored):
    # Parse the text-format protos, then build the initial scope and return
    # its symbol id.
    job_conf = text_format.Parse(job_conf_str, job_conf_pb.JobConfigProto())
    parallel_conf = text_format.Parse(parallel_conf_str,
                                      placement_pb.ParallelConf())
    return scope_util.MakeInitialScope(job_conf, parallel_conf.device_tag,
                                       list(parallel_conf.device_name),
                                       is_mirrored).symbol_id
Example no. 4
def InterpretScope(session, function_desc, config_proto):
    # Generator: prepares the job config, placement, distribute strategy, and
    # initial scope for the job function, then yields inside a
    # JobBuildAndInferCtx so the function body is interpreted under that scope.
    job_conf = function_desc.job_config_proto
    job_conf.set_job_name(function_desc.job_func.__name__)
    placement_scope = function_desc.function_attribute.default_placement_scope
    if placement_scope is None:
        tag_and_dev_ids = placement_util.GetDefaultMachineDeviceIds(
            session.resource)
    else:
        assert isinstance(placement_scope, placement_ctx.EmptyPlacementScope)
        tag_and_dev_ids = (
            placement_scope.device_tag,
            placement_scope.machine_device_ids,
        )
    distribute_strategy = function_desc.function_attribute.default_distribute_strategy
    if distribute_strategy is None:
        distribute_strategy = distribute_util.DistributeConsistentStrategy()
    is_mirrored = isinstance(distribute_strategy,
                             distribute_util.DistributeMirroredStrategy)
    scope = scope_util.MakeInitialScope(job_conf, *tag_and_dev_ids,
                                        is_mirrored)
    with _JobBuildAndInferCtx(job_conf.job_name()), distribute_strategy:
        c_api_util.CurJobBuildAndInferCtx_SetJobConf(job_conf)
        with runtime_mode.ModeScope(runtime_mode.GLOBAL_MODE):
            with scope_util.ScopeContext(scope):
                yield
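InterpretScope likewise yields rather than returns, so it is meant to be consumed as a context manager. A minimal sketch under that assumption; `sess`, `func_desc`, and `config` are placeholders, not names from the original snippet:

import contextlib

# Hypothetical usage: wrap the generator and enter the scope it sets up.
interpret_scope = contextlib.contextmanager(InterpretScope)
with interpret_scope(sess, func_desc, config):
    pass  # ops defined here are interpreted inside the prepared job scope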
Example no. 5
def MakeScopeSymbol(job_conf, parallel_conf, is_mirrored):
    # If the placement defines a hierarchy, convert it to a Size object
    # before building the initial scope; return the scope's symbol id.
    parallel_hierarchy = None
    if parallel_conf.has_hierarchy():
        parallel_hierarchy = oneflow._oneflow_internal.Size(
            tuple(parallel_conf.hierarchy().dim()))
    return scope_util.MakeInitialScope(
        job_conf,
        parallel_conf.device_tag(),
        list(parallel_conf.device_name()),
        parallel_hierarchy,
        is_mirrored,
    ).symbol_id