def execTaskChain(chainCode):
    """Resolve a task chain and gather its tasks, levels and dict params.

    Parameters:
        chainCode: Code identifying the task chain(s) to load.

    Returns:
        A dict with the chain's task list, login task, first-level tasks,
        chain levels and input/output dict parameters. NOTE(review): the
        ``return`` sits inside the loop, so only the first chain returned
        by ``getTaskChain`` is processed — confirm this is intended.
    """
    chain_dao = TaskChainDao()
    for chain in chain_dao.getTaskChain(chainCode):
        chainCode = chain.get('TASK_CHAIN_CODE')
        logging.warning("当前正在执行的任务链:[%s]%s" % (chainCode, chain.get('TASK_CHAIN_NAME')))

        # Every task belonging to this chain.
        task_dao = TaskDao()
        tasks = task_dao.getTaskByChain(chainCode)

        # The login task (may be absent) and the first chain-level tasks:
        # children of the login task when one exists, otherwise the tasks
        # with no parent task code.
        login_task = task_dao.getLoginTask(chainCode)
        first_tasks = [
            t for t in tasks
            if (login_task and login_task.get('TASK_CODE') == t.get('BELONG_TASK_CODE'))
            or (not login_task and not t.get('BELONG_TASK_CODE'))
        ]

        # All task levels of the chain, then the input/output dict mappings.
        chain_levels = task_dao.getTaskChainLevel(chainCode)
        dict_dao = DictMappingDao()
        return {
            'taskList': tasks,
            'loginTask': login_task,
            'firstTasks': first_tasks,
            'chainLevels': chain_levels,
            'inputParms': dict_dao.getInputDictByTaskChain(chainCode),
            'outputParams': dict_dao.getOutputDictByTaskChain(chainCode),
        }
def insertNewTask(task_form):
    """Persist a new statistical-analysis task and enqueue its celery job.

    Parameters:
        task_form: Task information (dict-like); mutated in place with
            'task_config', 'task_id' and 'task_name'.

    Returns:
        None. The task row is written via TaskDao and the background job
        ``new_sa_celery_task`` is dispatched asynchronously.
    """
    counter = 0
    task_config = {
        'proj_name': task_form.get('proj_name'),
        'test_var_data_x': task_form.get('test_var_data_x'),
        'group_var_data_y': task_form.get('group_var_data_y'),
    }
    task_form['task_config'] = task_config

    # Timestamp plus a two-digit counter forms the task id.
    task_form['task_id'] = ('TASK' + time.strftime('%y%m%d%H%M%S')
                            + '{:02d}'.format(counter))
    # Fall back to the generated id when no name was supplied.
    task_form['task_name'] = task_form.get('task_name') or task_form['task_id']

    taskDao = TaskDao()
    taskDao.insertNewTask(task_form)

    # Dispatch the analysis to the celery worker.
    new_sa_celery_task.delay(taskid=task_form['task_id'],
                             tasktype=task_form['task_type'],
                             testvardatax=task_config['test_var_data_x'],
                             groupvardatay=task_config['group_var_data_y'])
    return
def new_ml_celery_task(taskid, projid, tasktype, traindata, enabletest,
                       testdata, label, featsel, estimator, cv):
    """Celery worker body for a machine-learning task.

    Marks the task 'Running', loads the referenced datasets from the DB
    as pandas frames, runs ``ml_task`` and stores the JSON-encoded
    result with status 'Finished'. On any exception the traceback is
    printed, the task is marked 'Failed' and the stringified error is
    stored as the result.

    Returns:
        None; all outcomes are recorded through TaskDao.
    """
    dao = TaskDao()
    dao.updateTaskStatusByTaskId(task_id=taskid, task_status='Running')
    try:
        # Each DB row carries a JSON payload in its first column.
        train_frames = [pd.read_json(row[0])
                        for row in list(dao.getDataByDataName(data_name=traindata))]
        test_frames = []
        if enabletest:
            test_frames = [pd.read_json(row[0])
                           for row in list(dao.getDataByDataName(data_name=testdata))]

        results = ml_task(taskid, projid, tasktype, train_frames, enabletest,
                          test_frames, label, featsel, estimator, cv)

        dao.updateTaskStatusByTaskId(task_id=taskid, task_status='Finished')
        dao.updateTaskResultByTaskId(task_id=taskid,
                                     task_result=json.dumps(results))
    except Exception as e:
        traceback.print_exc()
        dao.updateTaskStatusByTaskId(task_id=taskid, task_status='Failed')
        dao.updateTaskResultByTaskId(task_id=taskid,
                                     task_result=json.dumps(str(e)))
    return
def insertNewTask(task_form):
    """Create one task per (feature-selector, estimator) pair and enqueue it.

    For every combination of ``task_form['feat_sel']`` and
    ``task_form['estimator']`` this inserts a task row via TaskDao and
    dispatches a ``new_ml_celery_task`` celery job.

    Parameters:
        task_form: Task information (dict-like); mutated in place with
            'task_config', 'task_id' and 'task_name' for each combination.

    Returns:
        None; tasks are persisted and dispatched as a side effect.
    """
    # Abbreviations used to suffix task names. Loop-invariant, so built
    # once here instead of being rebuilt on every iteration.
    model_abbrs = {
        'Analysis of Variance': 'anova',
        'Principal Component Analysis': 'pca',
        'Recursive Feature Elimination': 'rfe',
        'None': 'none',
        'Support Vector Machine': 'svm',
        'Random Forest': 'rf',
        'Linear Discriminative Analysis': 'lda',
        'Logistic Regression': 'lr',
        'K Nearest Neighbor': 'knn',
        'Support Vector Regression': 'svr',
        'Elastic Net': 'en',
        'Ordinary Least Square': 'ols',
        'Lasso Regression': 'lasso',
        'Ridge Regression': 'ridge'
    }
    counter = 0
    task_name = task_form.get('task_name')
    for trans in task_form.get('feat_sel'):
        for estim in task_form.get('estimator'):
            task_config = {
                'proj_name': task_form['proj_name'],
                'train_data': task_form['train_data'],
                'enable_test': task_form['enable_test'],
                'test_data': task_form['test_data'],
                'label': task_form['label'],
                'feat_sel': trans,
                'estimator': estim,
                'cv_type': task_form['cv_type'],
            }
            task_form['task_config'] = task_config
            # Timestamp + two-digit counter keeps ids unique within one second.
            task_form['task_id'] = ('TASK' + time.strftime('%y%m%d%H%M%S')
                                    + '{:02d}'.format(counter))
            # Name is the user-supplied name (or the id) plus model suffixes.
            base_name = task_name if task_name else task_form['task_id']
            task_form['task_name'] = (base_name + '_' + model_abbrs[trans]
                                      + '_' + model_abbrs[estim])

            taskDao = TaskDao()
            taskDao.insertNewTask(task_form)
            # Dispatch this combination to the celery worker.
            new_ml_celery_task.delay(taskid=task_form['task_id'],
                                     projid=task_form['proj_id'],
                                     tasktype=task_form['task_type'],
                                     traindata=task_config['train_data'],
                                     enabletest=task_config['enable_test'],
                                     testdata=task_config['test_data'],
                                     label=task_config['label'],
                                     featsel=task_config['feat_sel'],
                                     estimator=task_config['estimator'],
                                     cv=task_config['cv_type'])
            counter += 1
    return
def new_sa_celery_task(taskid, tasktype, testvardatax, groupvardatay):
    """Celery worker body for a statistical-analysis task.

    Marks the task 'Running', loads the test-variable (x) and group-variable
    (y) datasets from the DB as pandas frames, runs ``test_sa_task`` and
    stores the JSON-encoded result with status 'Finished'. On any exception
    the traceback is printed, the task is marked 'Failed' and the
    stringified error is stored as the result.

    Returns:
        None; all outcomes are recorded through TaskDao.
    """
    dao = TaskDao()
    dao.updateTaskStatusByTaskId(task_id=taskid, task_status='Running')
    try:
        # Each DB row carries a JSON payload in its first column.
        x_frames = [pd.read_json(row[0])
                    for row in list(dao.getDataByDataName(data_name=testvardatax))]
        y_frames = [pd.read_json(row[0])
                    for row in list(dao.getDataByDataName(data_name=groupvardatay))]

        results = test_sa_task(taskid, tasktype, x_frames, y_frames)

        dao.updateTaskStatusByTaskId(task_id=taskid, task_status='Finished')
        dao.updateTaskResultByTaskId(task_id=taskid,
                                     task_result=json.dumps(results))
    except Exception as e:
        traceback.print_exc()
        dao.updateTaskStatusByTaskId(task_id=taskid, task_status='Failed')
        dao.updateTaskResultByTaskId(task_id=taskid,
                                     task_result=json.dumps(str(e)))
    return