def load(params):
    """Register the docker executor and its pre-run event handler.

    Refuses to load on non-Linux hosts (unless WORKER_FORCE_DOCKER_START
    is set) because shared volumes and pipes must be mapped between the
    host and the container.
    """
    from girder_worker.core import events, register_executor
    import executor

    running_on_linux = platform.system() == 'Linux'
    forced = bool(os.environ.get('WORKER_FORCE_DOCKER_START'))
    if not (running_on_linux or forced):
        raise Exception(
            'The docker plugin only works on Linux hosts due to '
            'mapping of shared volumes and pipes between host and '
            'container.')

    events.bind('run.before', 'docker', before_run)
    register_executor('docker', executor.run)
def load(params):
    """Wire up the pyspark executor, its lifecycle events, and converters.

    :param params: plugin parameters; ``params['plugin_dir']`` locates the
        converter definitions shipped with this plugin.
    """
    import girder_worker
    from girder_worker.core import events, format, register_executor
    from . import pyspark_executor, spark

    # Initialize the spark environment whenever either a [spark] config
    # section or a SPARK_HOME environment variable hints that spark exists.
    if 'SPARK_HOME' in os.environ or girder_worker.config.has_section('spark'):
        spark.setup_spark_env()

    register_executor('spark.python', pyspark_executor.run)

    events.bind('run.before', 'spark', setup_pyspark_task)
    events.bind('run.finally', 'spark', pyspark_run_cleanup)

    format.import_converters(os.path.join(params['plugin_dir'], 'converters'))
def load(params):
    """Register the docker executor together with its lifecycle handlers.

    :param params: plugin parameters; ``params['name']`` identifies this
        plugin's event bindings.
    """
    from girder_worker.core import events, register_executor
    import executor

    plugin_name = params['name']
    # Bind each lifecycle event to its handler under this plugin's name.
    bindings = (
        ('run.before', before_run),
        ('run.finally', task_cleanup),
        ('cleanup', docker_gc),
    )
    for event_name, handler in bindings:
        events.bind(event_name, plugin_name, handler)

    register_executor('docker', executor.run)
def load(params):
    """Register the Celery conversion/validator tasks and I/O event handlers.

    :param params: plugin parameters; ``params['name']`` is used as the
        handler identifier for the run.handle_input/run.handle_output events.
    """
    from girder_worker.app import app

    @app.task(name='girder_worker.convert')
    def _convert(*pargs, **kwargs):
        # Thin Celery wrapper around the module-level convert().
        return convert(*pargs, **kwargs)

    @app.task(name='girder_worker.validators')
    def _validators(*pargs, **kwargs):
        # Fixed: the original unpacked into `type` and `format`, shadowing
        # the builtins of the same names.
        type_arg, format_arg = pargs
        # None acts as a wildcard for either criterion; build the matching
        # node descriptors with a comprehension instead of a manual append loop.
        return [
            {
                'type': node.type,
                'format': node.format,
                'validator': data
            }
            for node, data in conv_graph.nodes(data=True)
            if type_arg in (None, node.type) and format_arg in (None, node.format)
        ]

    events.bind('run.handle_input', params['name'], handle_input)
    events.bind('run.handle_output', params['name'], handle_output)