def get_server():
    """Determine the host and port the GUI server should bind to.

    Platform defaults: 'localhost' on Windows and on WSL (Linux platform
    string containing 'Microsoft'), '0.0.0.0' on native Linux and macOS.
    Either value may be overridden by 'gui_host' / 'gui_port' in the
    system configuration.

    Returns:
        dict with 'host' and 'port' keys. On any error, logs the
        exception and exits the process.
    """
    try:
        server = {}
        pl = platform.platform()
        if pl.startswith('Windows'):
            def_host = 'localhost'
        elif pl.startswith('Linux'):
            # WSL reports as Linux but includes 'Microsoft'; bind locally there.
            if 'Microsoft' in pl:
                def_host = 'localhost'
            else:
                def_host = '0.0.0.0'
        elif pl.startswith('Darwin'):
            def_host = '0.0.0.0'
        else:
            def_host = 'localhost'
        host = au.get_system_conf().get('gui_host', def_host)
        port = au.get_system_conf().get('gui_port', 8060)
        server['host'] = host
        server['port'] = port
        return server
    except Exception as e:
        logger.exception(e)
        if debug:
            traceback.print_exc()
        logger.info('Exiting...')
        # Message fixed to include 'starting', matching the wording used
        # elsewhere in this file.
        print('Error occurred while starting OpenCRAVAT server.\nCheck {} for details.'.format(log_path))
        exit()
async def clean_sessions():
    """Periodically purge expired user sessions from the admin database.

    Runs forever: every `session_clean_interval` seconds (default one
    hour) it deletes sessions older than `max_session_age` seconds
    (default one week). Exceptions are logged and end the loop.
    """
    try:
        # Defaults: one-week session lifetime, hourly sweep.
        max_age = au.get_system_conf().get('max_session_age', 604800)
        interval = au.get_system_conf().get('session_clean_interval', 3600)
        while True:
            await cravat_multiuser.admindb.clean_sessions(max_age)
            await asyncio.sleep(interval)
    except Exception as e:
        logger.exception(e)
        if debug:
            traceback.print_exc()
def get_server():
    """Determine host/port for the GUI server, honoring SSL overrides.

    Platform defaults: 'localhost' on Windows and WSL, '0.0.0.0' on
    native Linux and macOS. When SSL is enabled, 'gui_host_ssl' /
    'gui_port_ssl' take precedence over 'gui_host' / 'gui_port'
    (port default 8443); otherwise 'gui_host' / 'gui_port' are used
    (port default 8080).

    Returns:
        dict with 'host' and 'port' keys. On any error, logs the
        exception and exits the process.
    """
    try:
        server = {}
        pl = platform.platform()
        if pl.startswith("Windows"):
            def_host = "localhost"
        elif pl.startswith("Linux"):
            # WSL reports as Linux but includes 'Microsoft'; bind locally there.
            if "Microsoft" in pl:
                def_host = "localhost"
            else:
                def_host = "0.0.0.0"
        elif pl.startswith("Darwin"):
            def_host = "0.0.0.0"
        else:
            def_host = "localhost"
        if ssl_enabled:
            # SSL-specific settings win over the plain GUI settings.
            if "gui_host_ssl" in sysconf:
                host = sysconf["gui_host_ssl"]
            elif "gui_host" in sysconf:
                host = sysconf["gui_host"]
            else:
                host = def_host
            if "gui_port_ssl" in sysconf:
                port = sysconf["gui_port_ssl"]
            elif "gui_port" in sysconf:
                port = sysconf["gui_port"]
            else:
                port = 8443
        else:
            host = au.get_system_conf().get("gui_host", def_host)
            port = au.get_system_conf().get("gui_port", 8080)
        server["host"] = host
        server["port"] = port
        return server
    except Exception as e:
        logger.exception(e)
        if debug:
            traceback.print_exc()
        logger.info("Exiting...")
        # Message fixed to include 'starting', matching the wording used
        # elsewhere in this file.
        print(
            "Error occurred while starting OpenCRAVAT server.\nCheck {} for details.".format(
                log_path
            )
        )
        exit()
def get_webviewerconf():
    """Load and return the web viewer's YAML configuration as a dict."""
    conf_path = os.path.join(
        au.get_system_conf()['home'], 'viewers', 'webviewer', 'webviewer.yml'
    )
    with open(conf_path) as f:
        return yaml.safe_load(f)
def install_base(args):
    """Install the base modules listed in the system configuration.

    NOTE(review): the incoming ``args`` is deliberately replaced by a
    fixed option namespace, so caller-supplied options are not honored
    here; only the base-module list from the system conf is used.
    """
    base_modules = au.get_system_conf().get(constants.base_modules_key, [])
    install_args = SimpleNamespace(
        modules=base_modules,
        force_data=False,
        skip_installed=True,
        version=None,
        yes=True,
    )
    install_modules(install_args)
def install_base(args):
    """Install base modules from the system conf, forwarding install flags.

    Forwards ``force_data``, ``force``, ``install_pypi_dependency`` and
    ``md`` from the caller's args; all other installer options are fixed
    for a non-interactive base install.
    """
    sys_conf = au.get_system_conf()
    base_modules = sys_conf.get(constants.base_modules_key, [])
    install_args = SimpleNamespace(
        modules=base_modules,
        force_data=args.force_data,
        version=None,
        yes=True,
        private=False,
        skip_dependencies=False,
        force=args.force,
        skip_data=False,
        install_pypi_dependency=args.install_pypi_dependency,
        md=args.md,
    )
    install_modules(install_args)
async def try_remote_user_login(request):
    """Log a user in via a trusted reverse-proxy header, if enabled.

    When remote-user headers are enabled, reads the header named by the
    system conf's 'remote_user_header' (default "remote_user"). A
    non-empty value establishes a session for that username, creates the
    user directory if needed, and registers a fresh session key.

    Returns:
        True when a remote user was logged in, False otherwise.
    """
    if not enable_remote_user_header:
        return False
    header_name = au.get_system_conf().get('remote_user_header', "remote_user")
    # .get() covers both a missing header and an empty value.
    remote_username = request.headers.get(header_name)
    if not remote_username:
        return False
    session = await get_session(request)
    session['username'] = remote_username
    create_user_dir_if_not_exist(remote_username)
    sessionkey = get_session_key()
    session['sessionkey'] = sessionkey
    await admindb.add_sessionkey(remote_username, sessionkey)
    return True
async def load_live_modules(module_names=None):
    """Populate the global live-annotator registry.

    Reads the optional 'live' section of the system conf for
    include/exclude lists, lazily creates the live mapper (falling back
    to 'hg38' when no genemapper is configured), and instantiates a live
    annotator for every eligible local annotator module.

    Args:
        module_names: module names that bypass the include/exclude
            filters and the secondary-input restriction. Defaults to no
            exemptions. (Changed from a mutable ``[]`` default to avoid
            the shared-mutable-default pitfall; behavior is unchanged.)
    """
    global live_modules
    global live_mapper
    global include_live_modules
    global exclude_live_modules
    if module_names is None:
        module_names = []
    print('populating live annotators')
    conf = au.get_system_conf()
    live_conf = conf.get('live', {})
    include_live_modules = live_conf.get('include', [])
    exclude_live_modules = live_conf.get('exclude', [])
    if live_mapper is None:
        cravat_conf = au.get_cravat_conf()
        # Fall back to the hg38 mapper when no genemapper is configured.
        default_mapper = cravat_conf.get('genemapper', 'hg38')
        live_mapper = get_live_mapper(default_mapper)
    modules = au.get_local_module_infos(types=['annotator'])
    for module in modules:
        if module.name in live_modules:
            continue
        if module.name not in module_names:
            if module.name in exclude_live_modules:
                continue
            if include_live_modules and module.name not in include_live_modules:
                continue
            # Modules requiring secondary inputs cannot run in live mode.
            if 'secondary_inputs' in module.conf:
                continue
        annotator = get_live_annotator(module.name)
        if annotator is None:
            continue
        live_modules[module.name] = annotator
    print('done populating live annotators')
def publish_module(args):
    """Publish a module to the store, prompting for missing credentials.

    Honors ``args.md`` as a custom modules directory, and falls back to
    'publish_username' / 'publish_password' in the system conf before
    prompting interactively.
    """
    if args.md is not None:
        constants.custom_modules_dir = args.md
    sys_conf = au.get_system_conf()

    def _credential(conf_key, prompt_fn):
        # Prefer the system-conf value; otherwise ask interactively.
        return sys_conf[conf_key] if conf_key in sys_conf else prompt_fn()

    if args.user is None:
        args.user = _credential('publish_username', lambda: input("Username: "))
    if args.password is None:
        args.password = _credential('publish_password', getpass)
    au.publish_module(
        args.module,
        args.user,
        args.password,
        overwrite=args.overwrite,
        include_data=args.data,
    )
async def submit(request):
    """Accept a multipart job submission, stage it on disk, and queue it.

    Validates the upload size against 'gui_input_size_limit' (MB), writes
    each uploaded file into a fresh job directory, builds the `oc run`
    command line from the submitted options, enqueues the job on the
    worker queue, and writes a provisional <run_name>.status.json.

    Returns:
        aiohttp JSON response with the job's info dict, or an HTTP error
        response (411/413) / {'status': 'notloggedin'} on rejection.
    """
    global filerouter
    global servermode
    sysconf = au.get_system_conf()
    size_cutoff = sysconf['gui_input_size_limit']  # megabytes
    if request.content_length is None:
        return web.HTTPLengthRequired(
            text=json.dumps({'status': 'fail', 'msg': 'Content-Length header required'}))
    if request.content_length > size_cutoff * 1024 * 1024:
        return web.HTTPRequestEntityTooLarge(
            text=json.dumps({'status': 'fail', 'msg': f'Input is too big. Limit is {size_cutoff}MB.'}))
    # In multiuser server mode, only logged-in users may submit.
    if servermode and server_ready:
        r = await cravat_multiuser.is_loggedin(request)
        if r == False:
            return web.json_response({'status': 'notloggedin'})
    jobs_dirs = await filerouter.get_jobs_dirs(request)
    jobs_dir = jobs_dirs[0]
    job_id = get_next_job_id()
    job_dir = os.path.join(jobs_dir, job_id)
    os.makedirs(job_dir, exist_ok=True)
    # Stream the multipart body: 'file_*' parts are input files,
    # the 'options' part carries the job options JSON.
    reader = await request.multipart()
    job_options = {}
    input_files = []
    while True:
        part = await reader.next()
        if not part:
            break
        if part.name.startswith('file_'):
            input_files.append(part)
            # Have to write to disk here
            wfname = part.filename
            wpath = os.path.join(job_dir, wfname)
            with open(wpath, 'wb') as wf:
                wf.write(await part.read())
        elif part.name == 'options':
            job_options = await part.json()
    input_fnames = [fp.filename for fp in input_files]
    # Run name: first file's name, suffixed with the extra-file count.
    run_name = input_fnames[0]
    if len(input_fnames) > 1:
        run_name += '_and_' + str(len(input_fnames) - 1) + '_files'
    info_fname = '{}.status.json'.format(run_name)
    job_info_fpath = os.path.join(job_dir, info_fname)
    job = WebJob(job_dir, job_info_fpath)
    job.save_job_options(job_options)
    job.set_info_values(
        orig_input_fname=input_fnames,
        run_name=run_name,
        submission_time=datetime.datetime.now().isoformat(),
        viewable=False)
    # Subprocess arguments
    input_fpaths = [os.path.join(job_dir, fn) for fn in input_fnames]
    run_args = ['oc', 'run']
    for fn in input_fnames:
        run_args.append(os.path.join(job_dir, fn))
    # Annotators: explicit list -> '-a ...'; otherwise exclude all ('-e *').
    if 'annotators' in job_options and len(job_options['annotators']) > 0 and job_options['annotators'][0] != '':
        annotators = job_options['annotators']
        annotators.sort()
        run_args.append('-a')
        run_args.extend(annotators)
    else:
        annotators = ''
        run_args.append('-e')
        run_args.append('*')
    # Liftover assembly
    run_args.append('-l')
    if 'assembly' in job_options:
        assembly = job_options['assembly']
    else:
        assembly = constants.default_assembly
    run_args.append(assembly)
    # Remember the chosen assembly for next time (per user in server mode).
    if servermode and server_ready:
        await cravat_multiuser.update_user_settings(request, {'lastAssembly': assembly})
    else:
        au.set_cravat_conf_prop('last_assembly', assembly)
    # Reports
    if 'reports' in job_options and len(job_options['reports']) > 0:
        run_args.append('-t')
        run_args.extend(job_options['reports'])
    else:
        run_args.extend(['--skip', 'reporter'])
    # Note
    # NOTE(review): `note` is only bound when 'note' is in job_options,
    # but status_json['note'] below reads it unconditionally — a missing
    # 'note' key would raise NameError there. Confirm callers always
    # include 'note' in the options payload.
    if 'note' in job_options:
        note = job_options['note']
        if note != '':
            run_args.append('--note')
            run_args.append(note)
    # Forced input format
    if 'forcedinputformat' in job_options and job_options['forcedinputformat']:
        run_args.append('--input-format')
        run_args.append(job_options['forcedinputformat'])
    if servermode:
        run_args.append('--writeadmindb')
        run_args.extend(['--jobid', job_id])
    run_args.append('--temp-files')
    # Register the job and hand it to the worker process via the queue.
    global job_queue
    global run_jobs_info
    job_ids = run_jobs_info['job_ids']
    job_ids.append(job_id)
    run_jobs_info['job_ids'] = job_ids
    qitem = {'cmd': 'submit', 'job_id': job_id, 'run_args': run_args}
    job_queue.put(qitem)
    status = {'status': 'Submitted'}
    job.set_info_values(status=status)
    if servermode and server_ready:
        await cravat_multiuser.add_job_info(request, job)
    # makes temporary status.json
    status_json = {}
    status_json['job_dir'] = job_dir
    status_json['id'] = job_id
    status_json['run_name'] = run_name
    status_json['assembly'] = assembly
    status_json['db_path'] = ''
    status_json['orig_input_fname'] = input_fnames
    status_json['orig_input_path'] = input_fpaths
    status_json['submission_time'] = datetime.datetime.now().isoformat()
    status_json['viewable'] = False
    status_json['note'] = note
    status_json['status'] = 'Submitted'
    status_json['reports'] = []
    pkg_ver = au.get_current_package_version()
    status_json['open_cravat_version'] = pkg_ver
    status_json['annotators'] = annotators
    with open(os.path.join(job_dir, run_name + '.status.json'), 'w') as wf:
        json.dump(status_json, wf, indent=2, sort_keys=True)
    return web.json_response(job.get_info_dict())
async def delete_job(request):
    """Queue deletion of a job and wait until its directory is gone.

    Enqueues a 'delete' command for the worker process, then polls every
    0.5 s until the job directory disappears from disk before returning
    an empty HTTP response.
    """
    global job_queue
    job_id = request.match_info['job_id']
    global filerouter
    job_dir = await filerouter.job_dir(request, job_id)
    qitem = {'cmd': 'delete', 'job_id': job_id, 'job_dir': job_dir}
    job_queue.put(qitem)
    # The worker performs the actual removal; wait for it to finish.
    while True:
        if os.path.exists(job_dir) == False:
            break
        else:
            await asyncio.sleep(0.5)
    return web.Response()

# Module-level setup: ensure 'max_num_concurrent_jobs' exists in the
# system conf, persisting the default on first run.
system_conf = au.get_system_conf()
if 'max_num_concurrent_jobs' not in system_conf:
    max_num_concurrent_jobs = constants.default_max_num_concurrent_jobs
    system_conf['max_num_concurrent_jobs'] = max_num_concurrent_jobs
    au.write_system_conf_file(system_conf)
else:
    max_num_concurrent_jobs = system_conf['max_num_concurrent_jobs']

# Shared state for the background job worker; populated by start_worker().
job_worker = None
job_queue = None
run_jobs_info = None

def start_worker():
    # NOTE(review): this definition continues past this chunk boundary.
    global job_worker
    global job_queue
    global run_jobs_info
def get_storeurl(request):
    """Return the module store URL from the system conf as a plain response."""
    store_url = au.get_system_conf()['store_url']
    return web.Response(text=store_url)
async def get_storeurl(request):
    """Return the store URL, upgraded to https when the request is https."""
    store_url = au.get_system_conf()['store_url']
    if request.scheme == 'https':
        # Keep the scheme consistent with the page to avoid mixed content.
        store_url = store_url.replace('http://', 'https://')
    return web.Response(text=store_url)
def main(url=None):
    """Start the OpenCRAVAT GUI server event loop.

    Probes the configured host/port first: if something is already
    listening there, optionally opens `url` in a browser and returns
    SERVER_ALREADY_RUNNING instead of starting a second instance.
    Otherwise schedules periodic tasks (wakeup tick, local module-update
    check, optional session cleaning) and runs the web server forever.

    Args:
        url: optional URL to open in the default browser once serving
            (suppressed in headless mode).
    """
    global args
    try:
        # Probe socket used only to detect an already-running instance.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(1)

        def wakeup():
            # No-op tick; keeps the loop responsive (e.g. to Ctrl-C on Windows).
            loop.call_later(0.1, wakeup)

        def check_local_update(interval):
            # Re-scan local modules every `interval` seconds; errors are
            # printed but never stop the rescheduling.
            try:
                ws.handle_modules_changed()
            except:
                traceback.print_exc()
            finally:
                loop.call_later(interval, check_local_update, interval)

        serv = get_server()
        global protocol
        host = serv.get('host')
        port = serv.get('port')
        try:
            # connect_ex returns 0 when something already accepts on host:port.
            sr = s.connect_ex((host, port))
            s.close()
            if sr == 0:
                logger.info('wcravat already running. Exiting from this instance of wcravat...')
                print('OpenCRAVAT is already running at {}{}:{}.'.format(
                    protocol, serv.get('host'), serv.get('port')))
                global SERVER_ALREADY_RUNNING
                if url and not headless:
                    webbrowser.open(url)
                return SERVER_ALREADY_RUNNING
        # NOTE(review): socket.connect_ex does not raise
        # requests.exceptions.ConnectionError; this handler looks
        # unreachable — confirm intent.
        except requests.exceptions.ConnectionError:
            pass
        print('OpenCRAVAT is served at {}:{}'.format(serv.get('host'), serv.get('port')))
        logger.info('Serving OpenCRAVAT server at {}:{}'.format(
            serv.get('host'), serv.get('port')))
        print('(To quit: Press Ctrl-C or Ctrl-Break if run on a Terminal or Windows, or click "Cancel" and then "Quit" if run through OpenCRAVAT app on Mac OS)')
        global loop
        loop = asyncio.get_event_loop()
        loop.call_later(0.1, wakeup)
        loop.call_later(1, check_local_update, 5)

        async def clean_sessions():
            """Clean sessions periodically."""
            try:
                max_age = au.get_system_conf().get('max_session_age', 604800)  # default 1 week
                interval = au.get_system_conf().get('session_clean_interval', 3600)  # default 1 hr
                while True:
                    await cravat_multiuser.admindb.clean_sessions(max_age)
                    await asyncio.sleep(interval)
            except KeyboardInterrupt:
                pass
            except Exception as e:
                logger.exception(e)
                if debug:
                    traceback.print_exc()

        # Session cleaning only runs in multiuser mode and only when an
        # explicit max_session_age is configured.
        if servermode and server_ready:
            if 'max_session_age' in au.get_system_conf():
                loop.create_task(clean_sessions())
        global ssl_enabled
        if ssl_enabled:
            global sc
            server = WebServer(loop=loop, ssl_context=sc, url=url)
        else:
            server = WebServer(loop=loop, url=url)
        try:
            loop.run_forever()
        except KeyboardInterrupt:
            pass
    except Exception as e:
        logger.exception(e)
        if debug:
            traceback.print_exc()
        logger.info('Exiting...')
        print('Error occurred while starting OpenCRAVAT server.\nCheck {} for details.'.format(log_path))
        exit()
def _load_system_conf(self, build_all=True):
    """Refresh the cached system conf, optionally rebuilding everything.

    Args:
        build_all: when True (default), rebuild derived state after the
            conf is reloaded.
    """
    self._system = au.get_system_conf()
    if not build_all:
        return
    self._build_all()