Example #1
def load(params):
    # `executor` and `before_run` are provided by this plugin's package and
    # imported at module level (not shown in this snippet).
    if (platform.system() != 'Linux' and
            not os.environ.get('WORKER_FORCE_DOCKER_START')):
        raise Exception('The docker plugin only works on Linux hosts due to '
                        'mapping of shared volumes and pipes between host and '
                        'container.')
    # Run before_run ahead of any task whose mode is 'docker', and register
    # the docker executor for that mode.
    girder_worker.events.bind('run.before', 'docker', before_run)
    girder_worker.register_executor('docker', executor.run)
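
For context on what registering a mode provides: in the 0.x-era girder_worker API, a task specification's `mode` field selects the executor registered under that name. The sketch below is illustrative only; the task keys and the `girder_worker.run` call reflect that assumed API and are not taken from the snippet above.

import girder_worker

# Hypothetical task spec: 'mode' picks the executor registered by the plugin
# above; the remaining keys are illustrative assumptions.
task = {
    'mode': 'docker',
    'docker_image': 'busybox:latest',
    'container_args': ['echo', 'hello'],
    'inputs': [],
    'outputs': []
}

# Assumed entry point that dispatches on task['mode'].
girder_worker.run(task, inputs={}, outputs={})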
Example #2
def load(params):
    # `executor` is this plugin's R executor module, imported at module level
    # (not shown in this snippet).
    girder_worker.register_executor('r', executor.run)

    # Make the plugin's R, table, and tree converters available to the worker.
    converters_dir = os.path.join(params['plugin_dir'], 'converters')
    girder_worker.format.import_converters([
        os.path.join(converters_dir, 'r'),
        os.path.join(converters_dir, 'table'),
        os.path.join(converters_dir, 'tree')
    ])
Example #3
def load(params):
    # `spark`, `pyspark_executor`, `setup_pyspark_task`, and
    # `pyspark_run_cleanup` are provided by this plugin's package and
    # imported at module level (not shown in this snippet).

    # If a [spark] config section or SPARK_HOME is present, set up the Spark
    # environment before registering the executor.
    if girder_worker.config.has_section('spark') or 'SPARK_HOME' in os.environ:
        spark.setup_spark_env()

    girder_worker.register_executor('spark.python', pyspark_executor.run)

    girder_worker.events.bind('run.before', 'spark', setup_pyspark_task)
    girder_worker.events.bind('run.finally', 'spark', pyspark_run_cleanup)

    girder_worker.format.import_converters(
        os.path.join(params['plugin_dir'], 'converters'))
Example #4
def load(params):
    girder_worker.register_executor("julia", executor.run)
Example #5
def load(params):
    girder_worker.register_executor('scala', executor.run)
    girder_worker.register_executor('spark.scala', executor.run_spark)
Example #6
def load(params):
    girder_worker.register_executor('swift', executor.run)
Example #7
def load(params):
    girder_worker.register_executor('docker', executor.run)
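
Each of these plugins registers a `run` callable from its own `executor` module. As a rough sketch of what such a module has to provide, here is a hypothetical stub; the signature follows the 0.x-era executor interface and is an assumption here, not something confirmed by the snippets above.

# plugins/<name>/executor.py (hypothetical stub)
def run(task, inputs, outputs, task_inputs, task_outputs, **kwargs):
    # A real executor would evaluate the task here (launch a container,
    # call out to R, submit a Spark job, ...). This stub just writes a
    # placeholder value for each declared output.
    for name in task_outputs:
        outputs[name]['script_data'] = None

The worker then invokes this callable whenever a task's `mode` matches the name passed to `register_executor`.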