def remove_client(client_name):
    """Remove a client from the registry.

    Responds 400 when the supplied name is not a string; otherwise
    removes the client and responds 200.
    """
    if not isinstance(client_name, str):
        return networking.create_response(400, 'client name must be a string')
    Registry.get_instance().remove_client(client_name)
    logging.info(f"Removed {client_name}")
    return networking.create_response(200)
def add_client(client):
    """Add a client to the registry.

    Responds 400 when the client is already registered, 200 otherwise.
    """
    was_added = Registry.get_instance().add_client(client)
    if not was_added:
        return networking.create_response(400, 'Client already registered')
    logging.info(f"Added {client['name']}, "
                 f"listening host: {client['listen_host']}, "
                 f"external_host: {client['external_host']}")
    return networking.create_response(200)
def echo():
    """Queue an echo task on this client's Celery queue."""
    cfg = app.config
    celery_client.send_task(
        'tasks.echo',
        [cfg['client'], cfg["ENV"]],
        serializer='pickle',
        queue=cfg['client']['name'])
    return networking.create_response(200)
def pca_projection():
    """Forward the posted payload to the 'tasks.pca' worker task."""
    logging.info('Computing projections')
    payload = [request.data, app.config['client']]
    celery_client.send_task(
        'tasks.pca',
        payload,
        serializer='pickle',
        queue=app.config['client']['name'])
    return networking.create_response(200)
def compute_cost():
    """Forward the posted payload to the 'tasks.lineSearch' worker task."""
    logging.info('Performing line search')
    payload = [request.data, app.config['client']]
    celery_client.send_task(
        'tasks.lineSearch',
        payload,
        serializer='pickle',
        queue=app.config['client']['name'])
    return networking.create_response(200)
def compute_likelihoods():
    """Forward the posted payload to the 'tasks.loglikelihood' worker task."""
    logging.info('Regression update')
    payload = [request.data, app.config['client']]
    celery_client.send_task(
        'tasks.loglikelihood',
        payload,
        serializer='pickle',
        queue=app.config['client']['name'])
    return networking.create_response(200)
def lr_init():
    """Queue the regression-initialization task for this client."""
    logging.info('Initializing Regression')
    cfg = app.config
    celery_client.send_task(
        'tasks.regression_init',
        [cfg['client'], cfg['ENV']],
        serializer='pickle',
        queue=cfg['client']['name'])
    return networking.create_response(200)
def init():
    """Queue the store-initialization task for this client."""
    logging.info('Got command to initialize')
    cfg = app.config
    celery_client.send_task(
        'tasks.init_store',
        [cfg['client'], cfg["ENV"]],
        serializer='pickle',
        queue=cfg['client']['name'])
    return networking.create_response(200)
def after_delayed():
    """Queue the dependent follow-up task (no arguments)."""
    logging.info('called after_delayed celery entry point')
    celery_client.send_task(
        'tasks.dependent',
        None,
        serializer='pickle',
        queue=app.config['client']['name'])
    return networking.create_response(200)
def lst_clients():
    """List all registered clients."""
    clients = Registry.get_instance().list_clients()
    return networking.create_response(200, clients)
def store_filtered():
    """Forward filtered-position results to the 'tasks.store_filtered' task."""
    logging.info('Got results of filtered positions')
    payload = [request.data, app.config['client']]
    celery_client.send_task(
        'tasks.store_filtered',
        payload,
        serializer='pickle',
        queue=app.config['client']['name'])
    return networking.create_response(200)
def ld_report():
    """Forward the posted payload to the 'tasks.report_ld' worker task."""
    cfg = app.config
    payload = [request.data, cfg['client'], cfg['ENV']]
    celery_client.send_task(
        'tasks.report_ld',
        payload,
        serializer='pickle',
        queue=cfg['client']['name'])
    return networking.create_response(200)
def communicate_cov():
    """Queue the covariance-reporting task for this client."""
    logging.info('Preparing to report covariances')
    cfg = app.config
    celery_client.send_task(
        'tasks.report_cov',
        [cfg['client'], cfg['ENV']],
        serializer='pickle',
        queue=cfg['client']['name'])
    return networking.create_response(200)
def data_adjust():
    """Forward the posted payload to the 'tasks.adjust' worker task."""
    logging.info('Covariate update')
    cfg = app.config
    payload = [request.data, cfg['client'], cfg["ENV"]]
    celery_client.send_task(
        'tasks.adjust',
        payload,
        serializer='pickle',
        queue=cfg['client']['name'])
    return networking.create_response(200)
def qc():
    """Forward the posted payload to the 'tasks.init_qc' worker task."""
    logging.info('Got command for QC')
    cfg = app.config
    payload = [request.data, cfg['client'], cfg['ENV']]
    celery_client.send_task(
        'tasks.init_qc',
        payload,
        serializer='pickle',
        queue=cfg['client']['name'])
    return networking.create_response(200)
def delayed():
    """Queue 'tasks.caller' to run `adder_fn(1, 2)` on this client's queue.

    Fire-and-forget: the task's promise is intentionally not awaited —
    waiting on it would block this handler and requires a configured
    Celery result backend.

    Returns:
        A 200 response once the task has been queued.
    """
    logging.info('called delayed celery entry point')
    client_name = app.config['client']['name']
    celery_client.send_task('tasks.caller', [adder_fn, 1, 2],
                            serializer='pickle', queue=client_name)
    return networking.create_response(200)
def start_subtask(task_name, subtask_name, client_name):
    """Dispatch a client-reported subtask event to the matching handler.

    Routes on (task_name, subtask_name). Unrecognized combinations fall
    through without action; every path returns a 200 response.

    :param task_name: top-level command (compared against `Commands.*`)
    :param subtask_name: phase within the command (e.g. "FIN", "LD")
    :param client_name: name of the reporting client
    """
    if task_name == Commands.INIT:
        # Initialization: clients upload their positions and counts.
        if subtask_name == 'POS':
            task_init.store_positions(request.data, client_name)
        elif subtask_name == 'COUNT':
            task_init.store_counts(request.data, client_name)
    elif task_name.startswith(Commands.QC):
        if subtask_name == "FIN":
            # filter_finished tracks per-client completion; True once all
            # registered clients have reported in.
            if task_qc.filter_finished(client_name, Commands.QC):
                logging.info("Done with QC.")
    elif task_name.startswith(Commands.PCA):
        if subtask_name == "FIN":
            if task_qc.filter_finished(client_name, Commands.PCA):
                logging.info("Done with PCA filters. Initiating pruning")
                reset_states("PRUNE")
                # Aggregator is a singleton sized by the number of
                # registered clients; 50 is a fixed constructor argument
                # (meaning not visible from here — see CovarianceAggregator).
                ld_agg = task_pca.CovarianceAggregator.get_instance(
                    len(Registry.get_instance().list_clients()), 50)
                # send message to start LD pruning
                ld_agg.send_request({})
        elif subtask_name == "LD":
            ld_agg = task_pca.CovarianceAggregator.get_instance(
                len(Registry.get_instance().list_clients()), 50)
            ld_agg.update(request.data)
        elif subtask_name == "PCAPOS":
            task_pca.Position_reporter.get_instance().report_pos()
        elif subtask_name == "COV":
            task_pca.store_covariance(client_name, request.data)
    elif task_name.startswith(Commands.ASSO):
        # Association phases all feed the shared LogisticAdmm singleton.
        ass_agg = task_ass.LogisticAdmm.get_instance({}, active=2)
        if subtask_name == "adjust":
            ass_agg.update_stats(request.data)
        elif subtask_name == "estimate":
            ass_agg.update(request.data)
        elif subtask_name == "pval":
            ass_agg.update_pval(request.data)
        elif subtask_name == "hessians":
            # Only run a Newton iteration once every client's Hessian
            # contribution has arrived.
            model, have_all_info = ass_agg.newton_stats_update(request.data)
            if have_all_info:
                ass_agg.newton_iter(model)
        elif subtask_name == "valback":
            ass_agg.collect_likelihoods(request.data)
    elif task_name == Commands.ECHO:
        if subtask_name == "ITR":
            echo = task_init.Echo.get_instance(1)
            # NOTE(review): echo() is invoked twice for one "ITR" event —
            # possibly intentional (register then read the average), but it
            # looks like a duplicated call. Confirm against Echo.echo().
            echo.echo(client_name)
            avg_t = echo.echo(client_name)
            # echo() presumably returns None until enough samples exist —
            # TODO confirm against task_init.Echo.
            if avg_t is not None:
                logging.info(f"Avg echo time={avg_t}")
    return networking.create_response(200)
def start_task(task_name):
    """Start a top-level task, merging request JSON into its option set.

    All JSON keys are upper-cased and combined with per-task defaults
    from `Thresholds` before the task is kicked off.

    :param task_name: command to start (compared against `Commands.*`)
    :returns: a 200 response acknowledging the start request
    """
    logging.info(f'Got command to start {task_name}, starting...')
    # Normalize user-supplied options to upper-case keys.
    args = {key.upper(): val for key, val in request.json.items()}
    if task_name == Commands.INIT:
        task_init.start_init_task()
    elif task_name.startswith(Commands.QC):
        args[Options.HWE] = args.get("QC_HWE", Thresholds.QC_hwe)
        args[Options.MAF] = args.get("QC_MAF", Thresholds.QC_maf)
        logging.info(f"Specified Filters :{args}")
        task_qc.start_client_qc_task(args)
        task_qc.start_local_qc_task(args)
    elif task_name.startswith(Commands.PCA):
        args["PCA_PCS"] = int(args.get("PCA_PCS", Thresholds.PCA_pcs))
        task_pca.Position_reporter.get_instance(args)
        if not task_pca.ready_to_decompose():
            if not task_pca.filtered():
                # Not yet filtered: run MAF + LD pruning first.
                args[Options.MAF] = args.get("PCA_MAF", Thresholds.PCA_maf)
                if "PCA_LD_WINDOW" not in args:  # default parameters
                    args["PCA_LD_WINDOW"] = Thresholds.PCA_ld_window
                if "PCA_LD_THRESHOLD" not in args:  # default parameters
                    args["PCA_LD_THRESHOLD"] = Thresholds.PCA_ld_threshold
                args[Options.LD] = [
                    args["PCA_LD_WINDOW"], args["PCA_LD_THRESHOLD"]
                ]
                logging.info(f"Specified pruning filters :{args}")
                task_pca.start_pca_filters(args)
            else:
                logging.info("Reporting Filtered Sites")
                task_pca.Position_reporter.get_instance().report_pos()
        else:
            logging.info("starting eigen decomposition")
            task_pca.eigenDecompose(n_components=args["PCA_PCS"])
    elif task_name == Commands.ASSO:
        args["ASSO_PCS"] = args.get("ASSO_PCS", Thresholds.ASSO_pcs)
        logging.info("Starting Associations")
        task_ass.LogisticAdmm.get_instance(args, active=2)
    elif task_name == Commands.ECHO:
        counts = args.get("ECHO_COUNTS", Thresholds.ECHO_COUNTS)
        # get_instance() is called for its singleton-creation side effect;
        # the previously-assigned `echo` local was never used.
        task_init.Echo.get_instance(counts)
    return networking.create_response(200, f'Started task {task_name}')
def report_status(client_name, status):
    """Log a status message reported by *client_name*."""
    message = f'[{client_name}]: {status}'
    logging.info(message)
    return networking.create_response(200)
def lr_association():
    """Forward the posted payload to the 'tasks.asso' worker task."""
    payload = [request.data, app.config['client']]
    celery_client.send_task(
        'tasks.asso',
        payload,
        serializer='pickle',
        queue=app.config['client']['name'])
    return networking.create_response(200)
def list_tasks():
    """Return the static list of known tasks."""
    return networking.create_response(200, tasks.task_list)
def next_task():
    """Return the next up task from the task registry."""
    task_registry = tasks.TaskReg.get_instance()
    return networking.create_response(200, task_registry.get_up_task())