# Default logging level of paramiko produces too much noise - raise to warning only. logging.getLogger('paramiko').setLevel( os.getenv('EG_SSH_LOG_LEVEL', logging.WARNING)) # Pop certain env variables that don't need to be logged, e.g. password env_pop_list = ['EG_REMOTE_PWD', 'LS_COLORS'] password = os.getenv('EG_REMOTE_PWD') # this should use password-less ssh default_kernel_launch_timeout = float( os.getenv('EG_KERNEL_LAUNCH_TIMEOUT', '30')) max_poll_attempts = int(os.getenv('EG_MAX_POLL_ATTEMPTS', '10')) poll_interval = float(os.getenv('EG_POLL_INTERVAL', '0.5')) socket_timeout = float(os.getenv('EG_SOCKET_TIMEOUT', '5.0')) tunneling_enabled = bool( os.getenv('EG_ENABLE_TUNNELING', 'False').lower() == 'True'.lower()) local_ip = localinterfaces.public_ips()[0] tunnel_ip = localinterfaces.local_ips()[0] class BaseProcessProxyABC(with_metaclass(abc.ABCMeta, object)): """Process Proxy ABC. Defines the required methods for process proxy classes """ def __init__(self, kernel_manager): self.kernel_manager = kernel_manager # use the zero-ip from the start, can prevent having to write out connection file again self.kernel_manager.ip = '0.0.0.0' self.log = kernel_manager.log # extract the kernel_id string from the connection file and set the KERNEL_ID environment variable self.kernel_id = os.path.basename(self.kernel_manager.connection_file). \
# Allow each user to run multiple named servers (required by cdsdashboards).
c.JupyterHub.allow_named_servers = True

# Wire in the ContainDS Dashboards UI: its page templates and URL handlers.
from cdsdashboards.app import CDS_TEMPLATE_PATHS
from cdsdashboards.hubextension import cds_extra_handlers
c.JupyterHub.template_paths = CDS_TEMPLATE_PATHS
c.JupyterHub.extra_handlers = cds_extra_handlers

c.Spawner.debug = True

from dockerspawner import DockerSpawner
from jupyter_client.localinterfaces import public_ips

# The hub API must listen on a routable IP: spawned containers cannot
# reach the default loopback address.
c.JupyterHub.hub_ip = public_ips()[0]

c.JupyterHub.spawner_class = DockerSpawner
# Dashboards are built as Docker containers too.
c.CDSDashboardsConfig.builder_class = 'cdsdashboards.builder.dockerbuilder.DockerBuilder'

#from jupyter_client.localinterfaces import public_ips
#c.JupyterHub.hub_ip = public_ips()[0]

# Containers talk to the hub over the dedicated Docker network.
c.DockerSpawner.use_internal_ip = True
c.DockerSpawner.network_name = 'e2etestnetwork'
# Pass the network name as argument to spawned containers
c.DockerSpawner.extra_host_config = {'network_mode': 'e2etestnetwork'}

# NOTE(review): notebook_dir is assigned but not consumed in this visible
# chunk -- presumably referenced further down the file; confirm.
notebook_dir = '/home/jovyan'
c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # JupyterHub always runs image tagged as "aiidalab-docker-stack:latest" on local docker instance c.DockerSpawner.image = "aiidalab-docker-stack:latest" c.DockerSpawner.extra_host_config.update({ # take care of lost child processes # see also: https://github.com/krallin/tini/issues/8 'init': True, # constrain CPU usage, see # https://docs.docker.com/config/containers/resource_constraints/#cpu # https://github.com/docker/docker-py/blob/7911c5463557acce7f0def1d9572742ef04a98f5/docker/api/container.py#L432 'cpu_period': 100000, 'cpu_quota': {{(aiidalab_server_cpu_limit * 100000) | int}}, }) c.DockerSpawner.volumes = {'/var/jupyterhub/volumes/{username}': '/home/aiida'} c.DockerSpawner.remove_containers = True c.JupyterHub.hub_ip = public_ips()[0] # default loopback port doesn't work c.Spawner.start_timeout = 180 c.Spawner.http_timeout = 120 c.Spawner.mem_limit = '{{ aiidalab_server_mem_limit }}' #=============================================================================== # Simple FirstUseAuthenticator to get started from firstuseauthenticator import FirstUseAuthenticator c.JupyterHub.authenticator_class = FirstUseAuthenticator c.FirstUseAuthenticator.create_users = {{aiidalab_server_create_users}} #=============================================================================== # OAuth2 authenticator example below #from oauthenticator.generic import GenericOAuthenticator, GenericEnvMixin # key for encrypting access_token generated using `openssl rand -hex 32`
# Set up auth environment c.LSSTAuth.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] c.LSSTAuth.client_id = os.environ['OAUTH_CLIENT_ID'] c.LSSTAuth.client_secret = os.environ['OAUTH_CLIENT_SECRET'] ghowl = os.environ.get('GITHUB_ORGANIZATION_WHITELIST') if ghowl: c.LSSTAuth.github_organization_whitelist = set(ghowl.split(",")) # Don't try to cleanup servers on exit - since in general for k8s, we want # the hub to be able to restart without losing user containers c.JupyterHub.cleanup_servers = False # Set Session DB URL if we have one db_url = os.getenv('SESSION_DB_URL') if db_url: c.JupyterHub.db_url = db_url # Allow style overrides c.JupyterHub.template_paths = ["/opt/lsst/software/jupyterhub/templates/"] hub_route = os.environ.get('HUB_ROUTE') or "/" if hub_route != '/': c.JupyterHub.base_url = hub_route # Set the Hub URLs c.JupyterHub.bind_url = 'http://0.0.0.0:8000' + hub_route c.JupyterHub.hub_bind_url = 'http://0.0.0.0:8081' + hub_route k8s_svc_address = os.environ.get('JLD_HUB_SERVICE_HOST') or public_ips()[0] c.JupyterHub.hub_connect_url = "http://" + k8s_svc_address + ":8081" + \ hub_route
def load_from_environment(self):
    """Populate attributes from environment variables.

    Each setting is read from one environment variable; when the variable
    is unset (or falsy, where ``or`` is used) a hard-coded default is
    substituted.  The ``intify``/``floatify``/``str_bool``/``listify``
    helpers coerce the raw string values to their target types.
    """
    # We already checked DEBUG in __init__
    # FQDN should uniquely identify an instance
    self.fqdn = os.getenv("FQDN") or "localhost"
    # Authentication parameters
    self.jwt_logout_url = os.getenv("LOGOUT_URL") or "/logout"
    self.strict_ldap_groups = str_bool(os.getenv("STRICT_LDAP_GROUPS"))
    # Settings for Options Form
    self.form_selector_title = (os.getenv("LAB_SELECTOR_TITLE") or
                                "Container Image Selector")
    self.form_template = os.getenv("OPTIONS_FORM_TEMPLATE") or (
        "/opt/lsst/software/jupyterhub/templates/" +
        "options_form.template.html")
    self.form_sizelist = listify(os.getenv("OPTIONS_FORM_SIZELIST")) or [
        "tiny",
        "small",
        "medium",
        "large",
    ]
    self.max_scan_delay = intify(os.getenv("MAX_SCAN_DELAY"), 300)
    self.initial_scan_interval = floatify(
        os.getenv("INITIAL_SCAN_INTERVAL"), 0.2)
    self.max_scan_interval = floatify(os.getenv("MAX_SCAN_INTERVAL"), 5.0)
    self.tiny_cpu_max = floatify(os.getenv("TINY_CPU_MAX"), 0.5)
    self.mb_per_cpu = intify(os.getenv("MB_PER_CPU"), 2048)
    self.size_index = intify(os.getenv("SIZE_INDEX"), 1)
    # Settings for Quota Manager
    self.resource_map = (os.getenv("RESOURCE_MAP") or
                         "/opt/lsst/software/jupyterhub/resources/" +
                         "resourcemap.json")
    self.max_dask_workers = int(os.getenv("MAX_DASK_WORKERS", 25))
    # Settings for Volume Manager
    self.volume_definition_file = os.getenv("VOLUME_DEFINITION_FILE") or (
        "/opt/lsst/software/jupyterhub/" + "mounts/mountpoints.json")
    self.base_passwd_file = (
        os.getenv("BASE_PASSWD_DEFINITION_FILE") or
        "/opt/lsst/software/jupyterhub/base-files/passwd")
    self.base_group_file = (
        os.getenv("BASE_GROUP_DEFINITION_FILE") or
        "/opt/lsst/software/jupyterhub/base-files/group")
    # Hub settings for Lab and Workflow spawning
    self.lab_no_sudo = str_bool(os.getenv("LAB_NO_SUDO"))
    self.lab_uid = intify(os.getenv("LAB_UID"), 769)
    self.lab_gid = intify(os.getenv("LAB_GID"), 769)
    self.lab_fs_gid = intify(os.getenv("LAB_FS_GID"), self.lab_gid)
    self.lab_default_image = (os.getenv("LAB_IMAGE") or
                              "lsstsqre/sciplat-lab:latest")
    # NOTE(review): the next three are kept as strings even though they look
    # numeric -- downstream presumably expects strings; confirm before changing.
    self.lab_size_range = os.getenv("LAB_SIZE_RANGE") or "4.0"
    self.cull_timeout = os.getenv("LAB_CULL_TIMEOUT") or "64800"
    self.cull_policy = os.getenv("LAB_CULL_POLICY") or "idle:remote"
    self.allow_dask_spawn = str_bool(os.getenv("ALLOW_DASK_SPAWN"))
    self.restrict_dask_nodes = os.getenv("RESTRICT_DASK_NODES")
    self.restrict_lab_nodes = os.getenv("RESTRICT_LAB_NODES")
    self.lab_nodejs_max_mem = os.getenv("LAB_NODEJS_MAX_MEM") or "6144"
    self.external_hub_url = os.getenv("EXTERNAL_HUB_URL")
    self.hub_route = os.getenv("HUB_ROUTE") or "/"
    self.external_instance_url = os.getenv("EXTERNAL_INSTANCE_URL")
    # Routes for the ancillary services proxied alongside the hub.
    self.firefly_route = os.getenv("FIREFLY_ROUTE") or "/firefly"
    self.js9_route = os.getenv("JS9_ROUTE") or "/js9"
    self.api_route = os.getenv("API_ROUTE") or "/api"
    self.tap_route = os.getenv("TAP_ROUTE") or "/api/tap"
    self.soda_route = os.getenv("SODA_ROUTE") or "/api/image/soda"
    self.workflow_route = os.getenv("WORKFLOW_ROUTE") or "/wf"
    self.gafaelfawr_route = os.getenv("GAFAELFAWR_ROUTE") or "/auth"
    # External URLs default to None when unset.
    self.external_firefly_url = os.getenv("EXTERNAL_FIREFLY_URL")
    self.external_js9_url = os.getenv("EXTERNAL_JS9_URL")
    self.external_api_url = os.getenv("EXTERNAL_API_URL")
    self.external_tap_url = os.getenv("EXTERNAL_TAP_URL")
    self.external_soda_url = os.getenv("EXTERNAL_SODA_URL")
    self.external_workflow_url = os.getenv("EXTERNAL_WORKFLOW_URL")
    self.external_gafaelfawr_url = os.getenv("EXTERNAL_GAFAELFAWR_URL")
    self.auto_repo_urls = os.getenv("AUTO_REPO_URLS")
    # Pull secret name
    self.pull_secret_name = os.getenv("PULL_SECRET_NAME", "pull-secret")
    # Prepuller settings
    self.lab_repo_owner = os.getenv("LAB_REPO_OWNER") or "lsstsqre"
    self.lab_repo_name = os.getenv("LAB_REPO_NAME") or "sciplat-lab"
    self.lab_repo_host = os.getenv("LAB_REPO_HOST") or "hub.docker.com"
    self.prepuller_namespace = (os.getenv("PREPULLER_NAMESPACE") or
                                get_execution_namespace())
    self.prepuller_experimentals = intify(
        os.getenv("PREPULLER_EXPERIMENTALS"), 0)
    self.prepuller_dailies = intify(os.getenv("PREPULLER_DAILIES"), 3)
    self.prepuller_weeklies = intify(os.getenv("PREPULLER_WEEKLIES"), 2)
    self.prepuller_releases = intify(os.getenv("PREPULLER_RELEASES"), 1)
    self.prepuller_cachefile = os.getenv("PREPULLER_CACHEFILE",
                                         "/tmp/repo-cache.json")
    # NOTE(review): type inconsistency -- this is int 3300 when unset but a
    # *string* when PREPULLER_TIMEOUT is set; consumers must tolerate both.
    self.prepuller_timeout = os.getenv("PREPULLER_TIMEOUT", 3300)
    cstr = 'echo "Prepuller for $(hostname) completed at $(date)."'
    self.prepuller_command = ["/bin/sh", "-c", cstr]
    # A JSON env var, when present, overrides the default prepull command.
    prp_cmd = os.getenv("PREPULLER_COMMAND_JSON")
    if prp_cmd:
        self.prepuller_command = json.loads(prp_cmd)
    # Reaper settings
    self.reaper_keep_experimentals = intify(
        os.getenv("REAPER_KEEP_EXPERIMENTALS"), 10)
    self.reaper_keep_dailies = intify(os.getenv("REAPER_KEEP_DAILIES"), 15)
    self.reaper_keep_weeklies = intify(os.getenv("REAPER_KEEP_WEEKLIES"), 78)
    # Fileserver settings
    self.fileserver_host = os.getenv(
        "EXTERNAL_FILESERVER_IP") or os.getenv("FILESERVER_SERVICE_HOST")
    # Reaper settings
    # These have to stay for Docker Hub
    self.reaper_user = os.getenv("IMAGE_REAPER_USER")
    self.reaper_password = os.getenv("IMAGE_REAPER_PASSWORD")
    # Hub internal settings
    my_ip = public_ips()[0]
    self.instance_name = os.getenv("INSTANCE_NAME")
    self.hub_host = os.getenv("HUB_SERVICE_HOST") or my_ip
    # NOTE(review): ports are strings when the env var is set, ints otherwise.
    self.hub_api_port = os.getenv("HUB_SERVICE_PORT_API") or 8081
    self.proxy_host = os.getenv("PROXY_SERVICE_HOST") or my_ip
    self.proxy_api_port = os.getenv("PROXY_SERVICE_PORT_API") or 8001
    self.session_db_url = os.getenv("SESSION_DB_URL")
    # SAL-specific settings
    # Pod multicast
    self.enable_multus = str_bool(os.getenv("ENABLE_MULTUS"))
    # Interface for instrument control
    self.lab_dds_interface = os.getenv("LSST_DDS_INTERFACE") or "lo"
    # DDS domain (used with SAL)
    self.lab_dds_domain = os.getenv("LSST_DDS_DOMAIN") or "citest"
    self.lab_dds_partition_prefix = (os.getenv("LSST_DDS_PARTITION_PREFIX")
                                     or "citest")
    # these have to be set post-initialization to avoid a circular
    # dependency.
    self.authenticator_class = None
    self.spawner_class = None
    # Lab settings
    self.api_token = os.getenv("JUPYTERHUB_API_TOKEN") or ""
    self.hub_api = os.getenv("JUPYTERHUB_API_URL") or ""
    self.user = os.getenv("JUPYTERHUB_USER") or ""
    # Butler settings
    self.butler_s3_endpoint_url = os.getenv("BUTLER_S3_ENDPOINT_URL") or ""
    self.butler_aws_access_key = os.getenv("BUTLER_AWS_ACCESS_KEY") or ""
    self.butler_aws_secret_key = os.getenv("BUTLER_AWS_SECRET_KEY") or ""
    self.butler_pgpassword = os.getenv("BUTLER_PGPASSWORD") or ""
    # Moneypenny settings
    self.turn_on_moneypenny = str_bool(os.getenv("TURN_ON_MONEYPENNY"))
    # Firefly settings
    self.firefly_html = os.getenv("FIREFLY_HTML") or "slate.html"
    self.firefly_lab_extension = True
    # Default channel name embeds the hostname, current epoch second and a
    # random 2-digit suffix so concurrent labs get distinct channels.
    self.firefly_channel_lab = os.getenv("fireflyChannelLab") or (
        "ffChan-" + (os.getenv("HOSTNAME") or "") + "-" +
        str(int(time.time())) + "%02d" % random.randint(0, 99))
def load_from_environment(self):
    '''Populate attributes from environment variables.

    Each setting is read from one environment variable, falling back to a
    hard-coded default when the variable is unset (or falsy, where ``or``
    is used).  ``intify``/``floatify``/``str_bool``/``listify`` coerce the
    raw strings to their target types.
    '''
    # We already checked DEBUG in __init__
    # FQDN should uniquely identify an instance
    self.fqdn = os.getenv("FQDN") or 'localhost'
    # Authentication parameters
    self.jwt_logout_url = os.getenv("LOGOUT_URL") or '/logout'
    self.strict_ldap_groups = str_bool(os.getenv('STRICT_LDAP_GROUPS'))
    # Settings for Options Form
    self.form_selector_title = (os.getenv('LAB_SELECTOR_TITLE') or
                                'Container Image Selector')
    self.form_template = (
        os.getenv('OPTIONS_FORM_TEMPLATE') or
        ('/opt/lsst/software/jupyterhub/templates/' +
         'options_form.template.html'))
    self.form_sizelist = (listify(
        os.getenv('OPTIONS_FORM_SIZELIST')) or
        ['tiny', 'small', 'medium', 'large'])
    self.max_scan_delay = intify(os.getenv('MAX_SCAN_DELAY'), 300)
    self.initial_scan_interval = floatify(
        os.getenv('INITIAL_SCAN_INTERVAL'), 0.2)
    self.max_scan_interval = floatify(
        os.getenv('MAX_SCAN_INTERVAL'), 5.0)
    self.tiny_cpu_max = floatify(os.getenv('TINY_CPU_MAX'), 0.5)
    self.mb_per_cpu = intify(os.getenv('MB_PER_CPU'), 2048)
    self.size_index = intify(os.getenv('SIZE_INDEX'), 1)
    # Settings for Quota Manager
    self.resource_map = (os.getenv('RESOURCE_MAP') or
                         '/opt/lsst/software/jupyterhub/resources/' +
                         'resourcemap.json')
    self.max_dask_workers = int(os.getenv('MAX_DASK_WORKERS', 25))
    # Settings for Volume Manager
    self.volume_definition_file = (
        os.getenv('VOLUME_DEFINITION_FILE') or
        ('/opt/lsst/software/jupyterhub/' +
         'mounts/mountpoints.json'))
    self.base_passwd_file = (
        os.getenv('BASE_PASSWD_DEFINITION_FILE') or
        '/opt/lsst/software/jupyterhub/base-files/passwd')
    self.base_group_file = (
        os.getenv('BASE_GROUP_DEFINITION_FILE') or
        '/opt/lsst/software/jupyterhub/base-files/group')
    # Hub settings for Lab and Workflow spawning
    self.lab_no_sudo = str_bool(os.getenv('LAB_NO_SUDO'))
    self.lab_uid = intify(os.getenv('LAB_UID'), 769)
    self.lab_gid = intify(os.getenv('LAB_GID'), 769)
    self.lab_fs_gid = intify(os.getenv('LAB_FS_GID'), self.lab_gid)
    self.lab_default_image = (os.getenv('LAB_IMAGE') or
                              "lsstsqre/sciplat-lab:latest")
    # NOTE(review): kept as strings even though they look numeric --
    # downstream presumably expects strings; confirm before changing.
    self.lab_size_range = os.getenv('LAB_SIZE_RANGE') or '4.0'
    self.cull_timeout = os.getenv('LAB_CULL_TIMEOUT') or '64800'
    self.cull_policy = os.getenv('LAB_CULL_POLICY') or 'idle:remote'
    self.allow_dask_spawn = str_bool(os.getenv('ALLOW_DASK_SPAWN'))
    self.restrict_dask_nodes = os.getenv('RESTRICT_DASK_NODES')
    self.restrict_lab_nodes = os.getenv('RESTRICT_LAB_NODES')
    self.lab_nodejs_max_mem = os.getenv('LAB_NODEJS_MAX_MEM') or '6144'
    self.external_hub_url = os.getenv('EXTERNAL_HUB_URL')
    self.hub_route = os.getenv('HUB_ROUTE') or '/'
    self.external_instance_url = os.getenv('EXTERNAL_INSTANCE_URL')
    # Routes for the ancillary services proxied alongside the hub.
    self.firefly_route = os.getenv('FIREFLY_ROUTE') or '/firefly'
    self.js9_route = os.getenv('JS9_ROUTE') or '/js9'
    self.api_route = os.getenv('API_ROUTE') or '/api'
    self.tap_route = os.getenv('TAP_ROUTE') or '/api/tap'
    self.soda_route = os.getenv('SODA_ROUTE') or '/api/image/soda'
    self.workflow_route = os.getenv('WORKFLOW_ROUTE') or '/wf'
    # External URLs default to None when unset.
    self.external_firefly_url = os.getenv('EXTERNAL_FIREFLY_URL')
    self.external_js9_url = os.getenv('EXTERNAL_JS9_URL')
    self.external_api_url = os.getenv('EXTERNAL_API_URL')
    self.external_tap_url = os.getenv('EXTERNAL_TAP_URL')
    self.external_soda_url = os.getenv('EXTERNAL_SODA_URL')
    self.external_workflow_url = os.getenv('EXTERNAL_WORKFLOW_URL')
    self.auto_repo_urls = os.getenv('AUTO_REPO_URLS')
    # Prepuller settings
    self.lab_repo_owner = os.getenv('LAB_REPO_OWNER') or 'lsstsqre'
    self.lab_repo_name = os.getenv('LAB_REPO_NAME') or 'sciplat-lab'
    self.lab_repo_host = os.getenv('LAB_REPO_HOST') or 'hub.docker.com'
    self.prepuller_namespace = (os.getenv('PREPULLER_NAMESPACE') or
                                get_execution_namespace())
    self.prepuller_experimentals = intify(
        os.getenv('PREPULLER_EXPERIMENTALS'), 0)
    self.prepuller_dailies = intify(os.getenv('PREPULLER_DAILIES'), 3)
    self.prepuller_weeklies = intify(
        os.getenv('PREPULLER_WEEKLIES'), 2)
    self.prepuller_releases = intify(
        os.getenv('PREPULLER_RELEASES'), 1)
    self.prepuller_cachefile = os.getenv('PREPULLER_CACHEFILE',
                                         '/tmp/repo-cache.json')
    # NOTE(review): type inconsistency -- int 3300 when unset, but a *string*
    # when PREPULLER_TIMEOUT is set; consumers must tolerate both.
    self.prepuller_timeout = os.getenv('PREPULLER_TIMEOUT', 3300)
    cstr = "echo \"Prepuller for $(hostname) completed at $(date).\""
    self.prepuller_command = ["/bin/sh", "-c", cstr]
    # A JSON env var, when present, overrides the default prepull command.
    prp_cmd = os.getenv('PREPULLER_COMMAND_JSON')
    if prp_cmd:
        self.prepuller_command = json.loads(prp_cmd)
    # Reaper settings
    self.reaper_keep_experimentals = intify(
        os.getenv('REAPER_KEEP_EXPERIMENTALS'), 10)
    self.reaper_keep_dailies = intify(
        os.getenv('REAPER_KEEP_DAILIES'), 15)
    self.reaper_keep_weeklies = intify(
        os.getenv('REAPER_KEEP_WEEKLIES'), 78)
    # Fileserver settings
    self.fileserver_host = (os.getenv('EXTERNAL_FILESERVER_IP') or
                            os.getenv('FILESERVER_SERVICE_HOST'))
    # Reaper settings
    self.reaper_user = os.getenv('IMAGE_REAPER_USER')
    self.reaper_password = os.getenv('IMAGE_REAPER_PASSWORD')
    # Hub internal settings
    my_ip = public_ips()[0]
    self.instance_name = os.getenv('INSTANCE_NAME')
    self.hub_host = os.getenv('HUB_SERVICE_HOST') or my_ip
    # NOTE(review): ports are strings when the env var is set, ints otherwise.
    self.hub_api_port = os.getenv('HUB_SERVICE_PORT_API') or 8081
    self.proxy_host = os.getenv('PROXY_SERVICE_HOST') or my_ip
    self.proxy_api_port = os.getenv('PROXY_SERVICE_PORT_API') or 8001
    self.session_db_url = os.getenv('SESSION_DB_URL')
    # Pod multicast (used with SAL)
    self.enable_multus = str_bool(os.getenv('ENABLE_MULTUS'))
    # DDS domain (used with SAL)
    self.lab_dds_domain = os.getenv('LSST_DDS_DOMAIN')
    # These have to be set post-initialization to avoid a circular
    # dependency.
    self.authenticator_class = None
    self.spawner_class = None
# Single-host JupyterHub serving Docker containers over HTTPS.
import os
from jupyter_client.localinterfaces import public_ips

# Containers must reach the hub API, so bind it to a routable IP rather
# than the default loopback address.
ip = public_ips()[0]

c.Application.log_level = 'DEBUG'
c.JupyterHub.hub_ip = ip
c.JupyterHub.ip = '0.0.0.0'
# Public interface served over HTTPS on 443 (cert/key configured below).
c.JupyterHub.port = 443
c.JupyterHub.cookie_secret_file = '/opt/jupyterhub/jupyterhub_cookie_secret'
c.JupyterHub.db_url = 'sqlite:////opt/jupyterhub/jupyterhub.sqlite'
c.JupyterHub.hub_port = 8181
c.JupyterHub.ssl_key = '/opt/jupyterhub/ssl/key.pem'
c.JupyterHub.ssl_cert = '/opt/jupyterhub/ssl/cert.pem'
# NOTE(review): DummyAuthenticator accepts any password -- dev/test only.
c.JupyterHub.authenticator_class = 'dummyauthenticator.DummyAuthenticator'
c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
c.DockerSpawner.image = 'bioinf-jupyterhub-singleuser'
# This should limit the env variables from jupyterhub to jupter notebook.
# Not sure if it works when notebook is started by sudospawner.
#c.Spawner.env_keep = ['PATH',
#                      'PYTHONPATH']
c.Authenticator.admin_users = {'kaur'}
#c.Spawner.args = [] ## The command used for starting notebooks. #c.Spawner.cmd = ['jupyterhub-singleuser'] ## Enable debug-logging of the single-user server c.Spawner.debug = True ## Whitelist of environment variables for the subprocess to inherit c.Spawner.env_keep = [ 'PATH', 'PYTHONPATH', 'PYTHONIOENCODING', 'CONDA_ROOT', 'CONDA_DEFAULT_ENV', 'VIRTUAL_ENV', 'LANG', 'LC_CTYPE', 'LC_ALL' ] # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.SystemUserSpawner' # container image c.SystemUserSpawner.container_image = 'huoty/jupyterhub-systemuser' # The docker instances need access to the Hub, so the default loopback port doesn't work: from jupyter_client.localinterfaces import public_ips print(public_ips()) c.SystemUserSpawner.container_ip = public_ips()[0] # read only volumes c.DockerSpawner.read_only_volumes = { '/dev/shm': '/dev/shm', '/etc/hosts': '/etc/hosts', }
# Minimal JupyterHub configuration: PAM authentication, local system users,
# JupyterLab UI, direct SSL termination by the hub.
c = get_config()

# FIX: was `c.Jupyterhub.admin_access` (lowercase "h"). The traitlets config
# object silently accepts unknown section names, so the typo meant admin
# access was never actually enabled.
c.JupyterHub.admin_access = True
c.Authenticator.admin_users = {'greg'}
c.JupyterHub.authenticator_class = 'jupyterhub.auth.PAMAuthenticator'
c.JupyterHub.config_file = '/etc/jupyterhub/jupyterhub_config.py'
c.JupyterHub.cookie_secret_file = '/etc/jupyterhub/jupyterhub_cookie_secret'
# Auto-create system accounts for authenticated users that lack one.
c.LocalAuthenticator.create_system_users = True
# Each user's server starts in their home directory.
c.Spawner.notebook_dir = '~'
# FIX: db_url must be an SQLAlchemy URL, not a bare filesystem path; the bare
# path fails when the hub hands it to create_engine() at startup.
c.JupyterHub.db_url = 'sqlite:////etc/jupyterhub/jupyterhub.sqlite'
from jupyter_client.localinterfaces import public_ips
# Bind the public interface to this host's routable IP.
c.JupyterHub.ip = public_ips()[0]
c.JupyterHub.port = 8000
c.JupyterHub.ssl_cert = '/srv/sslcert.pem'
c.JupyterHub.ssl_key = '/srv/sslkey.pem'
# Launch JupyterLab (jupyter-labhub) instead of the classic notebook server.
c.Spawner.cmd = ['jupyter-labhub']
# Configuration file for jupyterhub.
import os
from jupyter_client.localinterfaces import public_ips

c = get_config()  # noqa

c.JupyterHub.ssl_key = 'test.key'
c.JupyterHub.ssl_cert = 'test.crt'

# FIX: call public_ips() exactly once (the original invoked it twice in the
# same expression); fall back to loopback when no public interface is found.
ips = public_ips()
c.JupyterHub.hub_ip = ips[0] if ips else '127.0.0.1'

# Hub logo lives two directories up from this config file.
c.JupyterHub.logo_file = os.path.join(
    os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
    'remoteappmanager/static/images/header_logo.png')

# Choose between system-user mode and virtual-user mode
setting_mode = ('system_user', 'virtual_user')[1]

if setting_mode == 'virtual_user':
    c.JupyterHub.spawner_class = ('remoteappmanager.jupyterhub.spawners.' +
                                  'VirtualUserSpawner')

    # Parent directory in which temporary directory is created for
    # each virtual user
    # Set this to a drive with well defined capacity quota
    # If unset, no workspace would be available
    c.Spawner.workspace_dir = '/tmp/remoteapp'

    # Uncomment this if you want to enable the use of the configuration
    # file for the spawned service, instead of using the defaults.
    # c.Spawner.config_file_path = 'remoteappmanager_config.py'
c.JupyterHub.pid_file = '/root/jupyterhub.pid'
# Keep user containers running across hub restarts.
c.JupyterHub.cleanup_servers = False
# NOTE(review): DSAISpawner and public_ips are not defined/imported in this
# visible chunk -- presumably brought in earlier in the file; confirm.
c.JupyterHub.spawner_class = DSAISpawner
c.DSAISpawner.use_internal_ip = True
c.DSAISpawner.remove = True
# Fixed base ports for the Spark driver, block manager and UI in containers.
c.DSAISpawner.spark_driver_port = 30000
c.DSAISpawner.spark_blockmanager_port = 31000
c.DSAISpawner.spark_ui_port = 4500
c.DSAISpawner.spark_max_sessions_per_user = 10
c.DSAISpawner.http_timeout = 60
# Map specific domain accounts to dedicated cgroup parents...
c.DSAISpawner.user_cgroup_parent = {
    'bank\\user1' : '/jupyter-cgroup-1', # user 1
    'bank\\user2' : '/jupyter-cgroup-1', # user 2
    'bank\\user3' : '/jupyter-cgroup-2', # user 3
}
# ...and everyone else to a default cgroup parent.
c.DSAISpawner.cgroup_parent = '/jupyter-cgroup-3'
c.JupyterHub.hub_ip = '0.0.0.0'
c.JupyterHub.hub_connect_ip = public_ips()[0]
"bind": "/home/vagrant/jupyter/workshop/{username}", "mode": { "rw": [ "instructors" ] } } } }''' # this doesn't seem to work # c.JupyerHub.active_server_limit = 2 #c.JupyterHub.template_vars = {'announcement': 'some_text'} c.JupyterHub.confirm_no_ssl = True c.JupyterHub.cookie_secret = base64.b64decode('qBdGBamOJTk5REgm7GUdsReB4utbp4g+vBja0SwY2IQojyCxA+CwzOV5dTyPJWvK13s61Yie0c/WDUfy8HtU2w==') # hardwired network for v*.radia.run c.JupyterHub.hub_ip = [i for i in public_ips() if i.startswith('10.10.10.')][0] c.JupyterHub.ip = '0.0.0.0' c.JupyterHub.port = 8000 # NEED THIS to keep servers alive after restart of hub # this doesn't work if False # docker.errors.APIError: 409 Client Error: Conflict ("Conflict. The container name "/jupyter-vagrant" is # already in use by container "cb44afddee641143a798d6ee1dfa508014f4e4fbf097307d73702ee57664b652". c.JupyterHub.cleanup_servers = False # NEED THIS so people can restart their containers for real c.DockerSpawner.remove = True c.JupyterHub.proxy_auth_token = '+UFr+ALeDDPR4jg0WNX+hgaF0EV5FNat1A3Sv0swbrg=' # Debugging only