def load(params):
    """Plugin load hook: register the docker executor and its lifecycle events.

    Binds the docker-specific handlers to the worker event bus, scoped by
    this plugin's name, and registers the ``docker`` task executor.

    :param params: plugin loading parameters; ``params['name']`` is the
        plugin name used to scope the event bindings.
    """
    from girder_worker.core import events, register_executor
    # Explicit relative import (was implicit py2-only ``import executor``),
    # consistent with the other plugin load hooks in this project.
    from . import executor

    events.bind('run.before', params['name'], before_run)
    events.bind('run.finally', params['name'], task_cleanup)
    events.bind('cleanup', params['name'], docker_gc)
    register_executor('docker', executor.run)
def load(params):
    """Plugin load hook: register the docker executor, Linux-only.

    Raises if the host is not Linux (shared volumes and pipes are mapped
    between host and container in a Linux-specific way) unless the
    ``WORKER_FORCE_DOCKER_START`` environment variable is set.

    :param params: plugin loading parameters (unused here).
    :raises Exception: when loaded on a non-Linux host without the
        override environment variable.
    """
    from girder_worker.core import events, register_executor
    # Explicit relative import (was implicit py2-only ``import executor``),
    # consistent with the other plugin load hooks in this project.
    from . import executor

    if (platform.system() != 'Linux' and
            not os.environ.get('WORKER_FORCE_DOCKER_START')):
        raise Exception('The docker plugin only works on Linux hosts due to '
                        'mapping of shared volumes and pipes between host and '
                        'container.')

    events.bind('run.before', 'docker', before_run)
    register_executor('docker', executor.run)
def load(params):
    """Plugin load hook: register the ``r`` executor and its converters.

    :param params: plugin loading parameters; ``params['plugin_dir']`` is
        the plugin's directory, which contains the ``converters`` tree.
    """
    from girder_worker.core import register_executor, format
    from . import executor

    register_executor('r', executor.run)

    # Import the r/table/tree converter bundles shipped with this plugin.
    base = os.path.join(params['plugin_dir'], 'converters')
    format.import_converters(
        [os.path.join(base, sub) for sub in ('r', 'table', 'tree')])
def load(params):
    """Plugin load hook: register the ``r`` executor and its converters.

    Uses the ``format`` module from the types plugin to pull in the
    converter bundles shipped alongside this plugin.

    :param params: plugin loading parameters; ``params['plugin_dir']`` is
        the plugin's directory, which contains the ``converters`` tree.
    """
    from girder_worker.core import register_executor
    from girder_worker.plugins.types import format
    from . import executor

    register_executor('r', executor.run)

    # Import the r/table/tree converter bundles shipped with this plugin.
    root = os.path.join(params['plugin_dir'], 'converters')
    format.import_converters(
        [os.path.join(root, name) for name in ('r', 'table', 'tree')])
def load(params):
    """Plugin load hook: register the pyspark executor and spark events.

    Sets up the spark environment when configuration indicates spark is
    available, then registers the ``spark.python`` executor, binds the
    spark task lifecycle handlers, and imports this plugin's converters.

    :param params: plugin loading parameters; ``params['plugin_dir']`` is
        the plugin's directory, which contains the ``converters`` tree.
    """
    import girder_worker
    from girder_worker.core import events, format, register_executor
    from . import pyspark_executor, spark

    # Try to set up the spark environment when either an explicit config
    # section or a SPARK_HOME environment variable says spark is present.
    has_spark_config = girder_worker.config.has_section('spark')
    if has_spark_config or 'SPARK_HOME' in os.environ:
        spark.setup_spark_env()

    register_executor('spark.python', pyspark_executor.run)
    events.bind('run.before', 'spark', setup_pyspark_task)
    events.bind('run.finally', 'spark', pyspark_run_cleanup)
    format.import_converters(
        os.path.join(params['plugin_dir'], 'converters'))
def load(params):
    """Plugin load hook: register the ``swift`` task executor.

    :param params: plugin loading parameters (unused here).
    """
    from girder_worker.core import register_executor
    from . import executor

    register_executor('swift', executor.run)
def load(params):
    """Plugin load hook: register the scala and spark-scala executors.

    :param params: plugin loading parameters (unused here).
    """
    from girder_worker.core import register_executor
    from . import executor

    # Register both execution modes backed by this plugin's executor module,
    # in the same order as before.
    for mode, runner in (('scala', executor.run),
                         ('spark.scala', executor.run_spark)):
        register_executor(mode, runner)