Example #1
0
def start_tensorflow(model_name,
                     client=None,
                     options=None,
                     port=TF_PORT,
                     **kwargs):
    """ Start Tensorflow on Dask Cluster

    This launches Tensorflow Servers alongside Dask workers in-process

    Examples
    --------
    >>> client = Client('dask-scheduler-address:8786')
    >>> tf_spec, dask_spec = start_tensorflow(client)
    >>> tf_spec.as_dict()
    {'worker': ['192.168.1.100:2222', '192.168.1.101:2222']}

    Specify desired number of jobs types as keyword args
    >>> tf_config = tf.compat.v1.OptimizerOptions()
    >>> tf_config.GlobalJitLevel = tf_config.OFF
    >>> tf_config.do_function_inlining = True
    >> tf_config.opt_level
    >>> tf_config.gpu_options.force_gpu_compatible = True


    >>> tf_spec, dask_spec = start_tensorflow(client, tf_config=tf_config, chief=1, master=1, ps=2, worker=30)
    >>> tf_spec.as_dict()
    {
     'chief': ['192.168.1.1:2222'],
     'master': ['192.168.1.1:2223'],
        'ps': ['192.168.1.104:2222', '192.168.1.105:2222'],
    'worker': ['192.168.1.100:2222', '192.168.1.101:2222',
                '192.168.1.102:2222', '192.168.1.103:2222']
    }
    """
    client = client if client is not None else global_cluster(
        asynchronous=True)
    global_future = Future()
    tensorflow_scheduler_wrap = partial(tensorflow_scheduler,
                                        global_future,
                                        model_name,
                                        client=client,
                                        tf_port=port,
                                        **kwargs)
    if client.asynchronous:
        global_future.add_done_callback(lambda fu: client.loop.stop())
        client.loop.add_callback(tensorflow_scheduler_wrap)
        client.loop.start()
        result = global_future.result()
    else:
        result = client.sync(client.loop, tensorflow_scheduler_wrap)
    return result