""" from threading import Thread from time import sleep import logging import json import shutil, os from dataiku.doctor.server import serve from dataiku.doctor.utils import interrupt_optimization from .runner import setup_log, read_execution, load_libs, send_files, fetch_file from dataiku.base.remoterun import read_dku_env_and_set if __name__ == "__main__": setup_log() read_dku_env_and_set() execution = read_execution() execution_id = execution['id'] definition = json.loads(execution['definition']) # thread that pushes the model states to the backend stopping = not definition.get("pushModelState", True) def send_model_updates(): delay = 2 while not stopping: sleep(delay) files_list = [ 'm*/train_info.json', 'm*/keras_model_training_info.json', 'm*/tensorboard_logs', 'm*/grid_search_scores.json'
# NOTE(review): this line is a whitespace-collapsed fragment from the MIDDLE of the
# script's execution-type dispatch: an R/custom-code recipe branch (writes the payload
# to code.R and runs it through R-exec-wrapper.R via a shell command), followed by the
# NOTEBOOK_PYTHON / NOTEBOOK_R branch (fetches instance+project libs and sets
# PYTHONPATH or R lib env vars). The fragment starts inside an if/elif chain whose
# head is not visible here and is TRUNCATED mid-assignment at the end
# (`old_libs_var = os.environ.get('PYTHONPATH', '')`), so it is not valid Python as-is.
# Do not edit in place; recover the intact file first. Also worth flagging for the real
# fix: `fd.write(execution['payload'].encode('utf8'))` writes bytes to a text-mode
# file — presumably this should open in 'wb' or drop the .encode — verify in the
# original source.
if definition['recipeType'] == 'r' or definition[ 'recipeType'].startswith('CustomCode_'): with open('code.R', 'w') as fd: fd.write(execution['payload'].encode('utf8')) run_subprocess( execution_id, '/bin/sh -c "EXECUTION_ID=%s %s --quiet --no-save --args code.R < %s/R/R-exec-wrapper.R"' % (execution_id, r_bin, OPT_DIR)) else: raise Exception("Unsupported recipe type: %s" % definition['recipeType']) elif execution['type'] == 'NOTEBOOK_PYTHON' or execution[ 'type'] == 'NOTEBOOK_R': logging.info("Executing notebook") read_dku_env_and_set(no_fail=False, force=True) fetch_libs(execution_id, 'instance') fetch_libs(execution_id, 'project') if execution['type'] == 'NOTEBOOK_PYTHON': load_libs() os.environ['PYTHONPATH'] = '/opt/dataiku/python' else: set_env_for_r_libs() definition = json.loads(execution['definition']) # fixup env vars that pass the libraries to the subprocess if execution['type'] == 'NOTEBOOK_PYTHON': old_libs_var = os.environ.get('PYTHONPATH', '')