def __init__(self, redis_conn, workflow_id, app_id, config, lang='en'):
    """Initialize the scikit-learn minion.

    Sets up worker-process handles, the code transpiler, the temporary
    directory used for generated modules, signal handling and the
    socket.io channel used to publish job output.
    """
    Minion.__init__(self, redis_conn, workflow_id, app_id, config)

    # Process handles and state; actual processes are started later.
    self.terminate_proc_queue = multiprocessing.Queue()
    self.execute_process = None
    self.ping_process = None
    self.module = None
    self._state = {}

    self.config = config
    self.transpiler = ScikitLearnTranspiler(config)
    configuration.set_config(self.config)

    # Generated code is written under tmp_dir and imported from there,
    # hence the sys.path extension.
    self.tmp_dir = self.config.get('config', {}).get('tmp_dir', '/tmp')
    sys.path.append(self.tmp_dir)

    signal.signal(signal.SIGTERM, self._terminate)

    # Channel used to emit job output events to listeners.
    self.mgr = socketio.RedisManager(
        config['juicer']['servers']['redis_url'], 'job_output')

    # Single-worker executor: jobs for this minion run one at a time.
    self.executor = ThreadPoolExecutor(max_workers=1)
    self.job_future = None
    self.scikit_learn_config = config['juicer'].get('scikit_learn', {})

    # Self-termination bookkeeping.
    self.active_messages = 0
    self.self_terminate = True

    self.juicer_listener_enabled = False
    self.current_lang = lang
def __init__(self, redis_conn, workflow_id, app_id, config, lang='en'):
    """Initialize the Spark minion.

    Sets up worker-process handles, the Spark transpiler, the temporary
    directory for generated modules, signal handlers, the socket.io
    output channel and the catalog of status/error messages (MNN*
    codes) used when reporting back to clients.
    """
    Minion.__init__(self, redis_conn, workflow_id, app_id, config)

    # Process handles and state; actual processes are started later.
    self.terminate_proc_queue = multiprocessing.Queue()
    self.execute_process = None
    self.ping_process = None
    self.reload_code_process = None
    self.module = None
    self._state = {}

    self.transpiler = SparkTranspiler(config)
    self.config = config
    configuration.set_config(self.config)
    self.juicer_listener_enabled = False

    # Generated code is written under tmp_dir and imported from there,
    # hence the sys.path extension.
    self.tmp_dir = self.config.get('config', {}).get('tmp_dir', '/tmp')
    sys.path.append(self.tmp_dir)

    # Channel used to emit job output events to listeners.
    self.mgr = socketio.RedisManager(
        config['juicer']['servers']['redis_url'], 'job_output')

    # Single-worker executor: jobs for this minion run one at a time.
    self.executor = ThreadPoolExecutor(max_workers=1)
    self.job_future = None

    # Self-termination bookkeeping.
    self.active_messages = 0
    self.self_terminate = True

    # Errors and messages (codes reported back to clients).
    self.MNN000 = ('MNN000', _('Success.'))
    self.MNN001 = ('MNN001', _('Port output format not supported.'))
    self.MNN002 = ('MNN002', _('Success getting data from task.'))
    self.MNN003 = ('MNN003', _('State does not exists, processing app.'))
    self.MNN004 = ('MNN004', _('Invalid port.'))
    self.MNN005 = ('MNN005',
                   _('Unable to retrieve data because a previous error.'))
    self.MNN006 = ('MNN006',
                   _('Invalid Python code or incorrect encoding: {}'))
    self.MNN007 = ('MNN007', _('Job {} was canceled'))
    self.MNN008 = ('MNN008', _('App {} was terminated'))
    self.MNN009 = ('MNN009', _('Workflow specification is missing'))

    # Used in the template file, declared here to gettext detect them
    self.msgs = [
        _('Task running'),
        _('Task completed'),
        _('Task ignored (not used by other task or as an output)')
    ]

    self.current_lang = lang
    signal.signal(signal.SIGTERM, self._terminate)
    signal.signal(signal.SIGINT, self._cleanup)

    self.last_job_id = 0
    self.new_session = False

    self.cluster_options = {}
    self.last_cluster_id = None

    # FIX: filename previously read 'lemonade-lib-pythoni_{}.zip'
    # (typo: stray 'i' after 'python').
    self.DIST_ZIP_FILE = '/tmp/lemonade-lib-python_{}.zip'.format(
        self.app_id)
def main(workflow_id, execute_main, params, config, deploy, export_notebook):
    """Fetch a workflow from the Tahiti service and transpile it.

    Loads workflow *workflow_id* plus the operation catalog from Tahiti,
    selects the transpiler matching the workflow's platform (spark,
    compss, scikit-learn or keras) and generates the target code.

    :param workflow_id: identifier of the workflow in Tahiti.
    :param execute_main: whether generated code should run its main().
    :param params: mutable dict of transpilation parameters
        (``execute_main`` is injected into it).
    :param config: juicer configuration mapping.
    :param deploy: generate deployment code.
    :param export_notebook: export the result as a notebook.
    """
    log.debug(_('Processing workflow queue %s'), workflow_id)
    tahiti_conf = config['juicer']['services']['tahiti']

    resp = query_tahiti(base_url=tahiti_conf['url'], item_path='/workflows',
                        token=str(tahiti_conf['auth_token']),
                        item_id=workflow_id)
    loader = Workflow(resp, config)
    # FIXME: Implement validation
    configuration.set_config(config)

    ops = query_tahiti(
        base_url=tahiti_conf['url'], item_path='/operations',
        token=str(tahiti_conf['auth_token']), item_id='',
        qs='fields=id,slug,ports.id,ports.slug,ports.interfaces&platform=1')
    # Lookup tables used by the Spark transpiler.
    slug_to_op_id = {op['slug']: op['id'] for op in ops}
    port_id_to_port = {p['id']: p for op in ops for p in op['ports']}

    try:
        if loader.platform == "spark":
            transpiler = SparkTranspiler(configuration.get_config(),
                                         slug_to_op_id, port_id_to_port)
        elif loader.platform == "compss":
            transpiler = COMPSsTranspiler(configuration.get_config())
        elif loader.platform == "scikit-learn":
            transpiler = ScikitLearnTranspiler(configuration.get_config())
        elif loader.platform == 'keras':
            transpiler = KerasTranspiler(configuration.get_config())
        else:
            raise ValueError(
                _('Invalid platform value: {}').format(loader.platform))

        params['execute_main'] = execute_main
        transpiler.execute_main = execute_main
        transpiler.transpile(loader.workflow, loader.graph, params=params,
                             deploy=deploy, export_notebook=export_notebook,
                             job_id=0)
    except ValueError as ve:
        # NOTE(review): this also catches the invalid-platform ValueError
        # above, so the logged message can be misleading — confirm intent.
        log.exception(_("At least one parameter is missing"), exc_info=ve)
    # Removed a trailing bare ``except: raise`` clause: it re-raised
    # everything unchanged (a no-op) and bare ``except`` is an
    # anti-pattern.
def __init__(self, config, minion_executable, log_dir='/tmp',
             config_file_path=None):
    """Initialize the minion manager.

    Stores configuration, prepares the bookkeeping for the minion
    processes it will supervise, computes the port range reserved for
    minions and opens the socket.io channel used for job output.
    """
    # Supervised process handles; populated once processes are spawned.
    self.minion_support_process = None
    self.new_minion_watch_process = None
    self.start_process = None
    self.minion_status_process = None
    self.state_control = None
    self.minion_watch_process = None
    self.active_minions = {}

    self.config = config
    configuration.set_config(config)
    self.config_file_path = config_file_path
    self.minion_executable = minion_executable

    # Fall back to the configured log path (or /tmp) when no explicit
    # directory is given.
    if log_dir:
        self.log_dir = log_dir
    else:
        self.log_dir = self.config['juicer'].get('log', {}).get(
            'path', '/tmp')

    signal.signal(signal.SIGTERM, self._terminate)

    minion_cfg = config['juicer'].get('minion', {})
    self.port_range = list(
        range(*minion_cfg.get('libprocess_port_range', [36000, 36500])))
    self.advertise_ip = minion_cfg.get('libprocess_advertise_ip')

    # Minion requires 3 different ports:
    # 1 for libprocess/Mesos communication
    # 1 for driver port
    # 1 for block manager
    self.port_offset = minion_cfg.get('port_offset', 100)

    # Channel used to emit job output events to listeners.
    self.mgr = socketio.RedisManager(
        config['juicer']['servers']['redis_url'], 'job_output')