def load(params):
    if (platform.system() != 'Linux' and
            not os.environ.get('WORKER_FORCE_DOCKER_START')):
        raise Exception('The docker plugin only works on Linux hosts due to '
                        'mapping of shared volumes and pipes between host and '
                        'container.')

    girder_worker.events.bind('run.before', 'docker', before_run)
    girder_worker.register_executor('docker', executor.run)

def load(params):
    girder_worker.register_executor('r', executor.run)

    converters_dir = os.path.join(params['plugin_dir'], 'converters')
    girder_worker.format.import_converters([
        os.path.join(converters_dir, 'r'),
        os.path.join(converters_dir, 'table'),
        os.path.join(converters_dir, 'tree')
    ])

def load(params):
    # If there is a spark config section, try to set up the Spark environment.
    if girder_worker.config.has_section('spark') or 'SPARK_HOME' in os.environ:
        spark.setup_spark_env()

    girder_worker.register_executor('spark.python', pyspark_executor.run)

    girder_worker.events.bind('run.before', 'spark', setup_pyspark_task)
    girder_worker.events.bind('run.finally', 'spark', pyspark_run_cleanup)

    girder_worker.format.import_converters(
        os.path.join(params['plugin_dir'], 'converters'))

def load(params): girder_worker.register_executor("julia", executor.run)
def load(params):
    girder_worker.register_executor('scala', executor.run)
    girder_worker.register_executor('spark.scala', executor.run_spark)

def load(params):
    girder_worker.register_executor('swift', executor.run)

def load(params):
    girder_worker.register_executor('docker', executor.run)
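
# Taken together, the load() functions above share one plugin pattern:
# register an executor for the plugin's task mode, optionally bind handlers
# to task lifecycle events, and import any format converters the plugin
# ships. Below is a minimal sketch of a hypothetical plugin combining all
# three hooks, using only the girder_worker calls seen above; the
# 'myplugin' mode, the my_executor module, and the event-handler signature
# are assumptions, not part of any real plugin.
import os

import girder_worker

from . import my_executor  # hypothetical executor module for this sketch


def _before_run(event):
    # Hypothetical hook body; assumes bound handlers receive an event
    # object, matching the before_run/setup_pyspark_task handlers above.
    pass


def load(params):
    # Route tasks whose mode is 'myplugin' to this plugin's executor.
    girder_worker.register_executor('myplugin', my_executor.run)

    # Call _before_run ahead of each 'myplugin' task.
    girder_worker.events.bind('run.before', 'myplugin', _before_run)

    # Register any converters shipped in this plugin's converters directory.
    girder_worker.format.import_converters(
        os.path.join(params['plugin_dir'], 'converters'))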