def __init__(self):
    """Configure the resource_monitor plugin.

    Runs ``start_monitor.sh`` on the host (all node roles) and exposes
    port 8890 publicly. Ships the telegraf config and a docker-compose
    file alongside the entry script.
    """
    # (target-path-on-node, local-filename) pairs for the files this plugin ships.
    shipped = (
        ("start_monitor.sh", "start_monitor.sh"),
        ("etc/telegraf.conf", "telegraf.conf"),
        ("docker-compose.yml", "docker-compose.yml"),
    )
    plugin_files = [
        PluginFile(remote, os.path.join(dir_path, local)) for remote, local in shipped
    ]
    super().__init__(
        name="resource_monitor",
        ports=[PluginPort(internal=8890, public=True)],
        target=PluginTarget.Host,
        target_role=PluginTargetRole.All,
        execute="start_monitor.sh",
        files=plugin_files,
    )
def __init__(self):
    """Configure the spark_ui_proxy plugin.

    Runs ``spark_ui_proxy.sh`` on the master node only, proxying
    ``localhost:8080`` (the Spark UI) to public port 9999.
    """
    # Both the launcher script and the proxy implementation are shipped to the node.
    proxy_files = [
        PluginFile("spark_ui_proxy.sh", os.path.join(dir_path, "spark_ui_proxy.sh")),
        PluginFile("spark_ui_proxy.py", os.path.join(dir_path, "spark_ui_proxy.py")),
    ]
    super().__init__(
        name="spark_ui_proxy",
        ports=[PluginPort(internal=9999, public=True)],
        target_role=PluginTargetRole.Master,
        execute="spark_ui_proxy.sh",
        args=["localhost:8080", "9999"],
        files=proxy_files,
    )
def JupyterLabPlugin():
    """Build the PluginConfiguration for JupyterLab.

    Runs ``jupyter_lab.sh`` on all node roles and exposes port 8889 publicly.
    """
    entry_script = PluginFile("jupyter_lab.sh", os.path.join(dir_path, "jupyter_lab.sh"))
    return PluginConfiguration(
        name="jupyterlab",
        ports=[PluginPort(internal=8889, public=True)],
        target_role=PluginTargetRole.All,
        execute="jupyter_lab.sh",
        files=[entry_script],
    )
def __init__(self):
    """Configure the minimal 'simple' plugin: runs ``simple.sh`` on the host for all roles."""
    entry_script = PluginFile("simple.sh", os.path.join(dir_path, "simple.sh"))
    super().__init__(
        name="simple",
        target_role=PluginTargetRole.All,
        target=PluginTarget.Host,
        execute="simple.sh",
        files=[entry_script],
    )
def InstallPlugin(name, command, packages=None):
    """Build a generic package-install PluginConfiguration.

    Runs ``install.sh`` on all node roles with *command* exported as the
    ``COMMAND`` environment variable and *packages* forwarded as script args.

    NOTE(review): when *packages* is None, ``args=None`` is passed through
    verbatim — presumably PluginConfiguration tolerates that; confirm.
    """
    installer = PluginFile("install.sh", os.path.join(dir_path, "install.sh"))
    return PluginConfiguration(
        name=name,
        target_role=PluginTargetRole.All,
        execute="install.sh",
        files=[installer],
        args=packages,
        env={"COMMAND": command},
    )
def NvBLASPlugin():
    """Build the PluginConfiguration for NVBLAS: runs ``nvblas.sh`` on all roles, no ports."""
    entry_script = PluginFile("nvblas.sh", os.path.join(dir_path, "nvblas.sh"))
    return PluginConfiguration(
        name="nvblas",
        ports=[],
        target_role=PluginTargetRole.All,
        execute="nvblas.sh",
        files=[entry_script],
    )
def TensorflowOnSparkPlugin():
    """Build the PluginConfiguration for TensorFlowOnSpark (master node only)."""
    entry_script = PluginFile(
        "tensorflow_on_spark.sh",
        os.path.join(dir_path, "tensorflow_on_spark.sh"),
    )
    return PluginConfiguration(
        name="tensorflow_on_spark",
        target_role=PluginTargetRole.Master,
        execute="tensorflow_on_spark.sh",
        files=[entry_script],
    )
def RStudioServerPlugin(version="1.1.383"):
    """Build the PluginConfiguration for RStudio Server.

    Runs ``rstudio_server.sh`` on the master node, exposing port 8787
    publicly. The requested server version is handed to the script via the
    ``RSTUDIO_SERVER_VERSION`` environment variable.
    """
    entry_script = PluginFile("rstudio_server.sh", os.path.join(dir_path, "rstudio_server.sh"))
    return PluginConfiguration(
        name="rstudio_server",
        ports=[PluginPort(internal=8787, public=True)],
        target_role=PluginTargetRole.Master,
        execute="rstudio_server.sh",
        files=[entry_script],
        env={"RSTUDIO_SERVER_VERSION": version},
    )
def __init__(self):
    """Configure the jupyter plugin: runs ``jupyter.sh`` on all roles, public port 8888."""
    notebook_port = PluginPort(internal=8888, public=True)
    entry_script = PluginFile("jupyter.sh", os.path.join(dir_path, "jupyter.sh"))
    super().__init__(
        name="jupyter",
        ports=[notebook_port],
        target_role=PluginTargetRole.All,
        execute="jupyter.sh",
        files=[entry_script],
    )
def __init__(self):
    """Configure the jupyterlab plugin: runs ``jupyter_lab.sh`` everywhere, public port 8889.

    NOTE(review): this variant uses the ``run_on=PluginRunTarget`` keyword
    rather than ``target_role`` — presumably an older plugin API; verify.
    """
    lab_port = PluginPort(internal=8889, public=True)
    entry_script = PluginFile("jupyter_lab.sh", os.path.join(dir_path, "jupyter_lab.sh"))
    super().__init__(
        name="jupyterlab",
        ports=[lab_port],
        run_on=PluginRunTarget.All,
        execute="jupyter_lab.sh",
        files=[entry_script],
    )
def __init__(self, version="1.1.383"):
    """Configure the rstudio_server plugin (master only, public port 8787).

    The requested server version is exported to the script via the
    ``RSTUDIO_SERVER_VERSION`` environment variable.
    """
    ui_port = PluginPort(internal=8787, public=True)
    entry_script = PluginFile("rstudio_server.sh", os.path.join(dir_path, "rstudio_server.sh"))
    super().__init__(
        name="rstudio_server",
        ports=[ui_port],
        run_on=PluginRunTarget.Master,
        execute="rstudio_server.sh",
        files=[entry_script],
        env={"RSTUDIO_SERVER_VERSION": version},
    )
def __init__(self):
    """Configure the hdfs plugin.

    Runs ``hdfs.sh`` on all node roles. Only the Namenode (50070) and
    Datanode (50075) web ports are exposed publicly; the RPC/data-transfer
    ports stay internal.
    """
    # Internal-only HDFS service ports.
    hdfs_ports = [
        PluginPort(name="File system metadata operations", internal=8020),
        PluginPort(name="File system metadata operations(Backup)", internal=9000),
        PluginPort(name="Datanode data transfer", internal=50010),
        PluginPort(name="Datanode IPC metadata operations", internal=50020),
    ]
    # Publicly exposed web UIs.
    hdfs_ports.append(PluginPort(name="Namenode", internal=50070, public=True))
    hdfs_ports.append(PluginPort(name="Datanodes", internal=50075, public=True))
    super().__init__(
        name="hdfs",
        ports=hdfs_ports,
        target_role=PluginTargetRole.All,
        execute="hdfs.sh",
        files=[PluginFile("hdfs.sh", os.path.join(dir_path, "hdfs.sh"))],
    )