def statistics(self):
    """Aggregate job counts by month, weekday, hour, user and plugin."""
    jobs = DBSession.query(Job).join(PluginRequest).order_by(
        asc(PluginRequest.date_done)).all()
    # seed every known user and plugin so each one appears in the output
    stats = {'months': [0] * 12,
             'days': [0] * 7,
             'hours': [0] * 24,
             'users': dict((u.name, 1) for u in DBSession.query(User).all()),
             'plugins': dict((p['info']['title'], 1)
                             for p in operations.get_plugins_path(ordered=False))}
    for job in jobs:
        # completion date encoded as 'month weekday hour'
        month, weekday, hour = job.request.date_done.strftime('%m %w %H').split()
        stats['months'][int(month) - 1] += 1
        stats['days'][int(weekday)] += 1
        stats['hours'][int(hour)] += 1
        stats['users'][job.request.user.name] += 1
        try:
            stats['plugins'][job.request.plugin.info['title']] += 1
        except KeyError:
            # plugin no longer installed: skip it
            pass
    # expose user counts as a list of {'name', 'value'} records
    stats['users'] = [{'name': name, 'value': count}
                      for name, count in stats['users'].iteritems()]
    return {'jobs': json.dumps(stats)}
def _log_form_request(plugin_id, user, parameters):
    """Persist a PluginRequest row recording a plugin form submission."""
    request = PluginRequest()
    request.plugin_id = plugin_id
    request.user = user
    request.parameters = get_formparameters(parameters)
    DBSession.add(request)
    # flush so the generated primary key is available to the caller
    DBSession.flush()
    return request
def _log_form_request(plugin_id, user, parameters):
    """ log the plugin form submission. """
    # create the request row; it stays PENDING until the job completes
    pl = PluginRequest()
    pl.plugin_id = plugin_id
    pl.user = user
    pl.status = 'PENDING'
    params = get_formparameters(parameters)
    debug('set request params %s' % params,)
    #print ', '.join(['%s (%s) : %s (%s)' % (k, type(k), v, type(v)) for k, v in params.iteritems()])
    pl.parameters = params
    DBSession.add(pl)
    # flush so the generated primary key is available to the caller
    DBSession.flush()
    return pl
def callback_results(self, task_id, results):
    """Store the results reported back by a finished task.

    :param task_id: identifier of the task that produced the results
    :param results: JSON-encoded list of result dicts
    :return: status dict consumed by the caller
    """
    results = json.loads(results)
    # the task lookup does not depend on the result: query once, not per item
    task = DBSession.query(Task).filter(Task.task_id == task_id).first()
    if task is None:
        # unknown task: the original crashed with AttributeError here
        return {'status': 'failure', 'retval': 0}
    for result in results:
        res = Result()
        res.job_id = task.job.id
        if result.get('is_file', False):
            res.is_file = True
            res.path = result.get('path')
            res._type = result.get('type')
            res.fname = os.path.split(res.path)[1]
        else:
            res.result = result.get('value')
        DBSession.add(res)
    return {'status': 'success', 'retval': 1}
def _log_form_request(plugin_id, user, parameters):
    """ log the plugin form submission. """
    # record the submission; status stays PENDING until the job finishes
    pl = PluginRequest()
    pl.plugin_id = plugin_id
    pl.user = user
    pl.status = 'PENDING'
    params = get_formparameters(parameters)
    debug('set request params %s' % params, )
    #print ', '.join(['%s (%s) : %s (%s)' % (k, type(k), v, type(v)) for k, v in params.iteritems()])
    pl.parameters = params
    DBSession.add(pl)
    # flush now so pl.id is populated for the caller
    DBSession.flush()
    return pl
def index(self, *args): """ Display a list of all plugins in BioScript """ # get BioScript Server url (usually from config file) bs_server_url = tg.config.get('main.proxy') + '/' # build request to send to BioScript server bs_url = bs_server_url + 'plugins?ordered=true' # get the operation list back operation_list = urllib2.urlopen(bs_url).read() # fields can be pre-filled meth = 'get' if len(args) > 0 and args[0] == 'prefill': meth = 'get_prefill' # get previous launched jobs taht are in the session task_ids = session.get('task_ids', []) jobs = [] if task_ids: jobs = DBSession.query(Job).join(PluginRequest).filter( Job.task_id.in_(task_ids)).order_by( desc(PluginRequest.date_done)).all() # serve result on visual_index.mak template file return { 'oplist': operation_list, 'serv': bs_server_url, 'method': meth, 'jobs': jobs }
def status(self):
    """Return global job counters plus the list of installed plugins."""
    jobs = DBSession.query(Job).all()
    plugins = operations.get_plugins_path()
    mapping = {'plugins': plugins, 'nbplugins': len(plugins),
               'total': len(jobs),
               'running': 0, 'failure': 0, 'pending': 0, 'success': 0}
    for job in jobs:
        key = job.status.lower()
        # only bump known counters: the original raised KeyError on any
        # other status value (e.g. STARTED)
        if key in ('running', 'failure', 'pending', 'success'):
            mapping[key] += 1
    return mapping
def stats(self): jobs = DBSession.query(Job).join(PluginRequest).order_by( asc(PluginRequest.date_done)).all() # get number of jobs / month, days, ... d = { 'months': [0] * 12, 'days': [0] * 7, 'hours': [0] * 24, 'users': {}, 'remotes': {}, 'plugins': {} } # set all users to prevent looking if user alrady set for each jobs users = DBSession.query(User).all() for user in users: d['users'][user.name] = 1 if user.name == 'anonymous': d['remotes'][user.remote] = 1 # do the same for all plugin plugs = operations.get_plugins_path(ordered=False) for plug in plugs: d['plugins'][plug['info']['title']] = 1 # now look at each jobs one by one for job in jobs: dd = job.request.date_done.strftime('%m %w %H').split() currentday = int(dd[1]) d['months'][int(dd[0]) - 1] += 1 d['days'][currentday] += 1 d['hours'][int(dd[2])] += 1 d['users'][job.request.user.name] += 1 if job.request.user.name == 'anonymous': d['remotes'][job.request.user.remote] += 1 try: d['plugins'][job.request.plugin.info['title']] += 1 except KeyError: pass d['users'] = [{ 'name': k, 'value': v } for k, v in d['users'].iteritems()] d['remotes'] = [{ 'name': k, 'value': v } for k, v in d['remotes'].iteritems()] return {'jobs': json.dumps(d)}
def callback_results(self, task_id, results):
    """Store the results posted back by a finished task.

    :param task_id: identifier of the task that produced the results
    :param results: JSON-encoded list of result dicts
    :return: status dict consumed by the caller
    """
    debug('GOT CALLBACK %s %s' % (task_id, results))
    results = json.loads(results)
    # the task lookup is loop-invariant: query once instead of per result
    task = DBSession.query(Task).filter(
        Task.task_id == task_id).first()
    if task is None:
        # unknown task: the original crashed with AttributeError here
        return {'status': 'failure', 'retval': 0}
    for result in results:
        res = Result()
        res.job_id = task.job.id
        if result.get('is_file', False):
            res.is_file = True
            res.path = result.get('path')
            res._type = result.get('type')
            res.fname = os.path.split(res.path)[1]
        else:
            res.result = result.get('value')
        DBSession.add(res)
    return {'status': 'success', 'retval': 1}
def _check_in_database(plug):
    """Ensure the plugin has a database row, creating one if missing."""
    uid = plug.unique_id()
    existing = DBSession.query(Plugin).filter(
        Plugin.generated_id == uid).first()
    if existing is not None:
        return
    # first time we see this plugin: store its metadata
    record = Plugin()
    record.generated_id = uid
    record.deprecated = plug.deprecated
    meta = plug.info
    record.info = {'title': meta['title'],
                   'description': meta['description'],
                   'path': meta['path'],
                   'in': meta['in'],
                   'out': meta['out'],
                   'meta': meta['meta']}
    DBSession.add(record)
def get(self, task_id, result_id):
    """Serve one result of a job, as a file response or a raw value."""
    wanted = int(result_id)
    job = DBSession.query(Job).filter(Job.task_id == task_id).first()
    # return on the first result whose id matches
    matching = [r for r in job.results if r.id == wanted]
    if matching:
        result = matching[0]
        if result.is_file:
            return file_response(result.path)
        return {'result': result.result}
    return {'error': "Job identifier & result identifier doesn't correspond."}
def load_plugins(): """ Load the plugin into BioScript application """ # update plugins from github do_update = tg.config.get('plugins.update') if do_update and do_update.lower() in ['1', 'true', 't']: _update(tg.config.get('plugins.github.url')) # initialize plugin manager with yapsy plug_dir = constants.plugin_directory() manager = PluginManager(restrict='bs-operation') manager.add_plugin_path(plug_dir) print ' --- init plugins located in %s ---' % plug_dir # from yapsy.PluginManager import PluginManager # manager = PluginManager() # manager.setPluginPlaces([plug_dir]) # manager.setCategoriesFilter({ # "Operations": OperationPlugin, # }) # manager.collectPlugins() # check plugins and add in db if not already plugids = [] for name, clazz in manager.plugins().iteritems(): plug = clazz() #p = plug.plugin_object _check_plugin_info(plug) if tg.config['pylons.app_globals']: _check_in_database(plug) plugids.append(plug.unique_id()) # check deprecated plugins if tg.config['pylons.app_globals']: for p in DBSession.query(Plugin).all(): if p.generated_id not in plugids: p.deprecated = True DBSession.add(p) DBSession.flush() return manager
def _check_in_database(plug):
    """ Check if the plugin is in the database else create it """
    # look the plugin up by its generated unique identifier
    DB_plug = DBSession.query(Plugin).filter(
        Plugin.generated_id == plug.unique_id()).first()
    if DB_plug is None:
        # first time we see this plugin: persist its metadata
        DB_plug = Plugin()
        DB_plug.generated_id = plug.unique_id()
        DB_plug.deprecated = plug.deprecated
        info = plug.info
        DB_plug.info = {
            'title': info['title'],
            'description': info['description'],
            'path': info['path'],
            'in': info['in'],
            'out': info['out'],
            'meta': info['meta']
        }
        DBSession.add(DB_plug)
def all(self, limit=None, status=None):
    """Return the most recent jobs, optionally filtered by status.

    :param limit: maximum number of jobs to return (defaults to 50)
    :param status: keep only jobs with this status (success/failure/started)
    """
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            # an unparsable limit previously leaked through as a string
            # and broke the slice below; fall back to the default instead
            limit = None
    limit = (limit or 50)
    jobs = DBSession.query(Job).join(PluginRequest).order_by(
        desc(PluginRequest.date_done))[:limit]
    if status and status.lower() in ['success', 'failure', 'started']:
        jobs = [j for j in jobs if j.status == status.upper()]
    return {'jobs': jobs}
def all(self, limit=None, status=None):
    """Return the most recent jobs, optionally filtered by status.

    :param limit: maximum number of jobs to return (defaults to 50)
    :param status: keep only jobs with this status (success/failure/started)
    """
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            # reset on bad input: leaving the string in place made the
            # slice below raise a TypeError
            limit = None
    limit = (limit or 50)
    jobs = DBSession.query(Job).join(PluginRequest).order_by(
        desc(PluginRequest.date_done))[:limit]
    if status and status.lower() in ['success', 'failure', 'started']:
        jobs = [j for j in jobs if j.status == status.upper()]
    return {'jobs': jobs}
def load_plugins():
    """Instantiate every plugin and sync the plugin table with the loader."""
    manager = PluginManager('bsPlugins')
    manager.load()
    seen_ids = []
    for _, plugin_class in manager.plugins().iteritems():
        instance = plugin_class()
        if tg.config.get('pylons.app_globals'):
            _check_in_database(instance)
        seen_ids.append(instance.unique_id())
    # anything in the db that was not loaded above is deprecated
    if tg.config.get('pylons.app_globals'):
        for record in DBSession.query(Plugin).all():
            if record.generated_id not in seen_ids:
                record.deprecated = True
                DBSession.add(record)
        DBSession.flush()
    return manager
def get(self, task_id, result_id, xsend=False):
    """Serve one result of a job.

    :param task_id: task identifier of the job
    :param result_id: identifier of the wanted result
    :param xsend: when true, delegate file delivery to X-Sendfile
    """
    result_id = int(result_id)
    job = DBSession.query(Job).filter(Job.task_id == task_id).first()
    if job is None:
        # unknown task id previously crashed with AttributeError
        return {
            'error': "Job identifier & result identifier doesn't correspond."
        }
    for result in job.results:
        if result.id == result_id:
            if result.is_file:
                if xsend:
                    return file_response_with_xsendfile(result.path)
                return file_response(result.path)
            else:
                return {'result': result.result}
    return {
        'error': "Job identifier & result identifier doesn't correspond."
    }
def info(self, task_id):
    """Describe a job: its results, status, plugin and parameters."""
    job = DBSession.query(Job).filter(Job.task_id == task_id).first()
    req = job.request
    # one summary dict per result row
    summaries = []
    for r in job.results:
        summaries.append({'id': r.id,
                          'result': r.result,
                          'is_file': r.is_file,
                          'fname': r.fname})
    return {'results': summaries,
            'status': job.status,
            'plugin_id': req.plugin.id,
            'parameters': req.sanitized_parameter()}
def status(self):
    """Return global job counters plus the list of installed plugins."""
    jobs = DBSession.query(Job).all()
    plugins = operations.get_plugins_path()
    mapping = {
        'plugins': plugins,
        'nbplugins': len(plugins),
        'total': len(jobs),
        'running': 0,
        'failure': 0,
        'pending': 0,
        'success': 0
    }
    for job in jobs:
        key = job.status.lower()
        # only bump known counters: the original raised KeyError on any
        # other status value (e.g. STARTED)
        if key in ('running', 'failure', 'pending', 'success'):
            mapping[key] += 1
    return mapping
def index(self, *args, **kw):
    """Summarize jobs by status for the dashboard."""
    jobs = DBSession.query(Job).all()
    plugins = operations.get_plugins_path()
    counters = {'plugins': plugins,
                'ordered': operations.get_plugins_path(ordered=True),
                'nbplugins': len(plugins),
                'total': len(jobs),
                'started': 0, 'failure': 0, 'pending': 0, 'success': 0}
    for job in jobs:
        key = job.status.lower()
        # any status without its own counter is lumped under 'started'
        if key not in counters:
            key = 'started'
        counters[key] += 1
    return counters
def index(self, task_id=None):
    """Render the result page for one job identified by its task id."""
    if task_id is None:
        return {'job_id': None}
    job = DBSession.query(Job).filter(Job.task_id == task_id).first()
    if job is None:
        return {'job_id': True, 'error': 'Wrong job identifier, "%s" is not recognized as a valid job.' % task_id}
    # one entry per result; files are exposed through their download url
    results = [{'is_file': result.is_file, 'result': result.result, 'path': get_result_url(result, task_id), 'fname': result.fname} for result in job.results]
    # additionnal information
    trace = job.error or ''
    req = job.request
    plug = req.plugin
    datedone = datetime.strftime(req.date_done, '%a %d %b %Y at %H:%M:%S')
    plugin_id = plug.id
    plugin_info = plug.info
    parameters = req.parameters
    return {'status': job.status, 'task_id': task_id, 'job_id': job.id, 'results': results, 'traceback': trace, 'date': datedone, 'plugin_id': plugin_id, 'plugin_info': plugin_info, 'parameters': parameters}
def index(self, *args):
    """ Display a list of all plugins in BioScript """
    # the BioScript server base url comes from the configuration
    server_url = tg.config.get('main.proxy') + '/'
    # ask the server for the ordered plugin list
    operation_list = urllib2.urlopen(server_url + 'plugins?ordered=true').read()
    # 'prefill' as first path element switches the form-filling method
    meth = 'get_prefill' if (len(args) > 0 and args[0] == 'prefill') else 'get'
    # previously launched jobs are remembered in the session
    task_ids = session.get('task_ids', [])
    if task_ids:
        jobs = DBSession.query(Job).join(PluginRequest).filter(
            Job.task_id.in_(task_ids)).order_by(
            desc(PluginRequest.date_done)).all()
    else:
        jobs = []
    return {'oplist': operation_list,
            'serv': server_url,
            'method': meth,
            'jobs': jobs}
def setdefaultkey(self):
    """Generate a user key that is not already taken in the database."""
    while True:
        candidate = str(uuid.uuid4())
        # retry until the key is free
        if not DBSession.query(User).filter(User.key == candidate).first():
            return candidate
def _log_job_request(request_id, task_id):
    """Record the launch of a task for the given plugin request."""
    job = Job()
    job.request_id = request_id
    job.task_id = task_id
    DBSession.add(job)
    return job
def validate(self, **kw):
    """ plugin parameters validation """
    user = util.get_user(tg.request)
    debug('Got request validation from user %s' % user)
    # 'bs_private' carries the plugin id and other internal parameters
    if not 'bs_private' in kw:
        tg.abort(400, "Plugin identifier not found in the request.")
    debug('params %s' % kw, 1)
    bs_private = copy.deepcopy(json.loads(kw['bs_private']))
    debug('private %s' % bs_private, 1)
    plugin_id = bs_private['pp']['id']
    if plugin_id is None:
        tg.abort(400, "Plugin identifier not found in the request.")
    # check plugin id
    plug = putil.get_plugin_byId(plugin_id)
    if plug is None:
        tg.abort(400, "Bad plugin identifier")
    # get plugin form output
    obj = plug
    info = obj.info
    form = info.get('output')()
    # callback for jsonP
    callback = kw.get('callback', 'callback')
    # get the plugin from the database
    plugin_db = DBSession.query(Plugin).filter(Plugin.generated_id == obj.unique_id()).first()
    # log the submission before validating so failures are recorded too
    plugin_request = _log_form_request(plugin_id=plugin_db.id, user=user, parameters=kw)
    if 'prefill' in bs_private:
        prefill_fields(info.get('in'), form, bs_private['prefill'], kw, replace_value=False)
        debug('prefill in validation', 3)
        del bs_private['prefill']
    # validation
    try:
        form = form().req()
        form.validate(kw)
    except (tw2.core.ValidationError, Invalid) as e:
        main_proxy = tg.config.get('main.proxy')
        e.widget.action = main_proxy + tg.url('plugins/index', {'id': plugin_id})
        debug('private after validation failed %s' % bs_private, 1)
        #value = {'bs_private': json.dumps(bs_private)}
        #debug('value %s' % value)
        #e.widget.value = value
        plugin_request.status = 'FAILED'
        plugin_request.error = str(e)
        DBSession.add(plugin_request)
        # return the re-rendered widget so the client can display the errors
        return jsonp_response(**{'validation': 'failed', 'desc': info.get('description'), 'title': info.get('title'), 'widget': e.widget.display(), 'callback': callback})
    debug('Validation pass')
    #if the validation passes, remove private parameters from the request
    del kw['bs_private']
    if 'key' in kw:
        del kw['key']
    # fetch form files
    try:
        inputs_directory = filemanager.fetch(user, obj, kw)
    except Exception as e:
        plugin_request.status = 'FAILED'
        plugin_request.error = str(e)
        DBSession.add(plugin_request)
        import sys
        import traceback
        etype, value, tb = sys.exc_info()
        traceback.print_exception(etype, value, tb)
        # NOTE(review): validation is reported as 'success' here even though
        # file fetching failed; the 'error' key carries the failure -- confirm
        return jsonp_response(**{'validation': 'success', 'desc': info.get('description'), 'title': info.get('title'), 'error': 'error while fetching files : ' + str(e), 'callback': callback})
    debug('Files fetched')
    # get output directory to write results
    outputs_directory = filemanager.temporary_directory(constants.paths['data'])
    service_callback = None
    debug(user)
    if user.is_service:
        debug('is service', 1)
        # def out_path(service_name):
        # service accounts can override the output directory
        o = services.service_manager.get(user.name, constants.SERVICE_RESULT_ROOT_PARAMETER)
        if o:
            outputs_directory = o
        service_callback = services.service_manager.get(user.name, constants.SERVICE_CALLBACK_URL_PARAMETER)
    debug('Output dir = %s' % outputs_directory)
    # get user parameters from the request
    user_parameters = bs_private.get('app', "{}")
    debug('get user parameters : %s' % user_parameters, 1)
    # get response config from the request
    resp_config = bs_private.get('cfg', None)
    plugin_info = {'title': info['title'], 'plugin_id': plugin_db.id, 'generated_id': plugin_db.generated_id, 'description': info['description'], 'path': info['path'], 'in': info['in'], 'out': info['out'], 'meta': info['meta']}
    # call plugin process
    bioscript_callback = tg.config.get('main.proxy') + '/' + tg.url('plugins/callback_results')
    async_res = tasks.plugin_job.delay(user.name, inputs_directory, outputs_directory, plugin_info, user_parameters, service_callback, bioscript_callback, **kw)
    task_id = async_res.task_id
    _log_job_request(plugin_request.id, task_id)
    # 'min' response configuration skips the full plugin_info payload
    if resp_config and resp_config.get('plugin_info', '') == 'min':
        return jsonp_response(**{'validation': 'success', 'plugin_id': plugin_id, 'task_id': task_id, 'callback': callback, 'app': user_parameters})
    return jsonp_response(**{'validation': 'success', 'plugin_id': plugin_id, 'task_id': task_id, 'plugin_info': json.dumps({'title': info['title'], 'description': info['description'], 'path': info['path'], 'in': info['in'], 'out': info['out'], 'meta': info['meta']}), 'callback': callback, 'app': user_parameters})
def validate(self, **kw):
    """ plugin parameters validation """
    user = util.get_user(tg.request)
    # get private bioscript parameters
    if not 'bs_private' in kw:
        debug('bs_private not found')
        tg.abort(400, "Plugin identifier not found in the request.")
    bs_private = copy.deepcopy(json.loads(kw['bs_private']))
    debug('\n[plugin controller]\nVALIDATE %s' % kw, )
    # get the plugin from the private parameters
    plugin_id = 0
    try:
        plugin_id = bs_private['pp']['id']
    except KeyError:
        tg.abort(400, "Plugin identifier not found in the request.")
    if plugin_id == 0:
        tg.abort(400, "Plugin identifier not found in the request.")
    plug = operations.get_plugin_byId(plugin_id)
    if plug is None:
        tg.abort(400, "Bad plugin identifier.")
    # get the plugin from database
    plugin_db = DBSession.query(Plugin).filter(
        Plugin.generated_id == plug.unique_id()).first()
    # validate the form
    # we must do an HTTP call because I don't find a way
    # to call the 'render' method on 'plugin_validate' template
    request_url = tg.config.get('main.proxy') + '/plugins/_validate'
    validated = True
    info = plug.info
    # get the callback if any
    callback = kw.get('callback', 'callback')
    try:
        form = urllib2.urlopen(request_url, urllib.urlencode(kw)).read()
        # add some header because the request can come from another domain
        response.headers['Access-Control-Allow-Headers'] = 'X-CSRF-Token'
        response.headers['Access-Control-Allow-Origin'] = '*'
        try:
            form = json.loads(form)
        except:
            pass
        # a dict response means the parameters passed validation
        validated = isinstance(form, dict)
    except Exception as e:
        util.print_traceback()
        validated = False
        form = 'Problem with Bioscript server.'
    if not validated:
        debug('Validation failed', )
        return json.dumps({
            'validation': 'failed',
            'desc': info.get('description'),
            'title': info.get('title'),
            'widget': form,
            'callback': callback
        })
    # validation passes
    new_params = form['params']
    # NOTE(review): the assignment above is immediately overwritten -- confirm
    new_params = kw
    debug('Validation passes with params : %s' % new_params, )
    # we regoup all multi stuff in a single list:
    # for instance a multi field will give parameters like:
    # {SigMulti:1:signals: val1, SigMulti:2:signals: val2, ...}
    # and we will transform it to
    # {SigMulti: { signals : [val1, val2], ...}
    grouped_params = {}
    todel = []
    for k, v in new_params.iteritems():
        m = multipattern.match(k)
        if m is not None:
            todel.append(k)
            # keep empty values only when they are uploaded files
            if v or isinstance(v, cgi.FieldStorage):
                key1, n, key2 = m.groups()
                if key1 not in grouped_params:
                    grouped_params[key1] = {}
                if key2 in grouped_params[key1]:
                    grouped_params[key1][key2][int(n) - 1] = v
                elif not key2.endswith('bs_group'):
                    sl = SparseList()
                    sl[int(n) - 1] = v
                    grouped_params[key1][key2] = sl
    debug('group "multi" params: %s' % grouped_params)
    # debug('check fieldstorages',)
    # # must keep fieldstorages because they were converted to str
    # fs_bk = []
    # import cgi
    # for k, v in kw.iteritems():
    #     if isinstance(v, cgi.FieldStorage):
    #         fs_bk.append((k, v))
    # debug(fs_bk)
    # for fsk, fsv in fs_bk:
    #     m = multipattern.match(fsk)
    #     if m:
    #         key1, n, key2 = m.groups()
    #         grouped_params[key1][key2][int(n) - 1] = fsv
    #     else:
    #         grouped_params[fsk] = fsv
    new_params.update(grouped_params)
    # delete multi parameters
    for td in todel:
        del new_params[td]
    # but we need to keep all params that are multi and urls
    kw = new_params
    #remove private parameters from the request
    if 'bs_private' in kw:
        del kw['bs_private']
    if 'key' in kw:
        del kw['key']
    debug('New params are : %s' % new_params, )
    # update plugin parameters
    # log the request, it's a valid one
    plugin_request = _log_form_request(plugin_id=plugin_db.id,
                                       user=user,
                                       parameters=dict(kw))
    DBSession.add(plugin_request)
    debug('get output directory', )
    # get output directory to write results
    outputs_directory = filemanager.temporary_directory(
        constants.paths['data'])
    service_callback = None
    # if the user is a service, get parameters from configuration
    if user.is_service:
        debug('is service', )
        o = services.service_manager.get(
            user.name, constants.SERVICE_RESULT_ROOT_PARAMETER)
        if o:
            outputs_directory = o
        service_callback = services.service_manager.get(
            user.name, constants.SERVICE_CALLBACK_URL_PARAMETER)
    debug('Write result in %s' % outputs_directory, )
    # get private parameters from the request
    private_parameters = bs_private.get('app', "{}")
    # get response configuration from the request
    resp_config = bs_private.get('cfg', None)
    plugin_info = {
        'title': info['title'],
        'plugin_id': plugin_db.id,
        'generated_id': plugin_db.generated_id,
        'description': info['description'],
        'path': info['path'],
        'in': info['in'],
        'out': info['out'],
        'meta': info['meta']
    }
    # define the bioscript callback
    bioscript_callback = tg.config.get('main.proxy') + '/' + tg.url(
        'plugins/callback_results')
    debug("callback on bs : %s " % bioscript_callback, )
    # if some files come from a file field, we must download them directly
    inputs_directory, dwdfiles = filemanager.fetchfilefields(
        user, plug, kw)
    # chain jobs : fetch files then plugin process
    async_res = tasks.plugin_job.delay(user, plug, inputs_directory,
                                       outputs_directory, dwdfiles,
                                       plugin_info, private_parameters,
                                       service_callback,
                                       bioscript_callback, **kw)
    task_id = async_res.task_id
    debug('Launch task %s' % task_id, )
    # log the job request
    _log_job_request(plugin_request.id, task_id)
    #prepare the response
    resp = {
        'validation': 'success',
        'plugin_id': plugin_id,
        'task_id': task_id,
        'callback': callback,
        'app': private_parameters
    }
    # NOTE(review): plugin_info is added when cfg asks for 'min', which is
    # the opposite of the other validate() implementation -- confirm intended
    if resp_config and resp_config.get('plugin_info', '') == 'min':
        resp.update({
            'plugin_info':
            json.dumps({
                'title': info['title'],
                'description': info['description'],
                'path': info['path'],
                'in': info['in'],
                'out': info['out'],
                'meta': info['meta']
            })
        })
    return json.dumps(resp)
def info(self, task_id):
    """Return results, status, plugin id and parameters for one job."""
    job = DBSession.query(Job).filter(Job.task_id == task_id).first()
    req = job.request
    # one summary dict per result row
    results = [{'id': r.id, 'result': r.result, 'is_file': r.is_file, 'fname': r.fname} for r in job.results]
    return {'results': results, 'status': job.status, 'plugin_id': req.plugin.id, 'parameters': req.sanitized_parameter()}
def validate(self, **kw):
    """ plugin parameters validation """
    user = util.get_user(tg.request)
    # get private bioscript parameters
    if not 'bs_private' in kw:
        debug('bs_private not found')
        tg.abort(400, "Plugin identifier not found in the request.")
    bs_private = copy.deepcopy(json.loads(kw['bs_private']))
    debug('\n[plugin controller]\nVALIDATE %s' % kw,)
    # get the plugin from the private parameters
    plugin_id = 0
    try:
        plugin_id = bs_private['pp']['id']
    except KeyError:
        tg.abort(400, "Plugin identifier not found in the request.")
    if plugin_id == 0:
        tg.abort(400, "Plugin identifier not found in the request.")
    plug = operations.get_plugin_byId(plugin_id)
    if plug is None:
        tg.abort(400, "Bad plugin identifier.")
    # get the plugin from database
    plugin_db = DBSession.query(Plugin).filter(Plugin.generated_id == plug.unique_id()).first()
    # validate the form
    # we must do an HTTP call because I don't find a way
    # to call the 'render' method on 'plugin_validate' template
    request_url = tg.config.get('main.proxy') + '/plugins/_validate'
    validated = True
    info = plug.info
    # get the callback if any
    callback = kw.get('callback', 'callback')
    try:
        form = urllib2.urlopen(request_url, urllib.urlencode(kw)).read()
        # add some header because the request can come from another domain
        response.headers['Access-Control-Allow-Headers'] = 'X-CSRF-Token'
        response.headers['Access-Control-Allow-Origin'] = '*'
        try:
            form = json.loads(form)
        except:
            pass
        # a dict response means the parameters passed validation
        validated = isinstance(form, dict)
    except Exception as e:
        util.print_traceback()
        validated = False
        form = 'Problem with Bioscript server.'
    if not validated:
        debug('Validation failed',)
        return json.dumps({'validation': 'failed', 'desc': info.get('description'), 'title': info.get('title'), 'widget': form, 'callback': callback})
    # validation passes
    new_params = form['params']
    # NOTE(review): the assignment above is immediately overwritten -- confirm
    new_params = kw
    debug('Validation passes with params : %s' % new_params,)
    # we regoup all multi stuff in a single list:
    # for instance a multi field will give parameters like:
    # {SigMulti:1:signals: val1, SigMulti:2:signals: val2, ...}
    # and we will transform it to
    # {SigMulti: { signals : [val1, val2], ...}
    grouped_params = {}
    todel = []
    for k, v in new_params.iteritems():
        m = multipattern.match(k)
        if m is not None:
            todel.append(k)
            # keep empty values only when they are uploaded files
            if v or isinstance(v, cgi.FieldStorage):
                key1, n, key2 = m.groups()
                if key1 not in grouped_params:
                    grouped_params[key1] = {}
                if key2 in grouped_params[key1]:
                    grouped_params[key1][key2][int(n) - 1] = v
                elif not key2.endswith('bs_group'):
                    sl = SparseList()
                    sl[int(n) - 1] = v
                    grouped_params[key1][key2] = sl
    debug('group "multi" params: %s' % grouped_params)
    # debug('check fieldstorages',)
    # # must keep fieldstorages because they were converted to str
    # fs_bk = []
    # import cgi
    # for k, v in kw.iteritems():
    #     if isinstance(v, cgi.FieldStorage):
    #         fs_bk.append((k, v))
    # debug(fs_bk)
    # for fsk, fsv in fs_bk:
    #     m = multipattern.match(fsk)
    #     if m:
    #         key1, n, key2 = m.groups()
    #         grouped_params[key1][key2][int(n) - 1] = fsv
    #     else:
    #         grouped_params[fsk] = fsv
    new_params.update(grouped_params)
    # delete multi parameters
    for td in todel:
        del new_params[td]
    # but we need to keep all params that are multi and urls
    kw = new_params
    #remove private parameters from the request
    if 'bs_private' in kw:
        del kw['bs_private']
    if 'key' in kw:
        del kw['key']
    debug('New params are : %s' % new_params,)
    # update plugin parameters
    # log the request, it's a valid one
    plugin_request = _log_form_request(plugin_id=plugin_db.id, user=user, parameters=dict(kw))
    DBSession.add(plugin_request)
    debug('get output directory',)
    # get output directory to write results
    outputs_directory = filemanager.temporary_directory(constants.paths['data'])
    service_callback = None
    # if the user is a service, get parameters from configuration
    if user.is_service:
        debug('is service',)
        o = services.service_manager.get(user.name, constants.SERVICE_RESULT_ROOT_PARAMETER)
        if o:
            outputs_directory = o
        service_callback = services.service_manager.get(user.name, constants.SERVICE_CALLBACK_URL_PARAMETER)
    debug('Write result in %s' % outputs_directory,)
    # get private parameters from the request
    private_parameters = bs_private.get('app', "{}")
    # get response configuration from the request
    resp_config = bs_private.get('cfg', None)
    plugin_info = {'title': info['title'], 'plugin_id': plugin_db.id, 'generated_id': plugin_db.generated_id, 'description': info['description'], 'path': info['path'], 'in': info['in'], 'out': info['out'], 'meta': info['meta']}
    # define the bioscript callback
    bioscript_callback = tg.config.get('main.proxy') + '/' + tg.url('plugins/callback_results')
    debug("callback on bs : %s " % bioscript_callback,)
    # if some files come from a file field, we must download them directly
    inputs_directory, dwdfiles = filemanager.fetchfilefields(user, plug, kw)
    # chain jobs : fetch files then plugin process
    async_res = tasks.plugin_job.delay(user, plug, inputs_directory, outputs_directory, dwdfiles, plugin_info, private_parameters, service_callback, bioscript_callback, **kw)
    task_id = async_res.task_id
    debug('Launch task %s' % task_id,)
    # log the job request
    _log_job_request(plugin_request.id, task_id)
    #prepare the response
    resp = {'validation': 'success', 'plugin_id': plugin_id, 'task_id': task_id, 'callback': callback, 'app': private_parameters}
    # NOTE(review): plugin_info is added when cfg asks for 'min', which is
    # the opposite of the other validate() implementation -- confirm intended
    if resp_config and resp_config.get('plugin_info', '') == 'min':
        resp.update({'plugin_info': json.dumps({'title': info['title'], 'description': info['description'], 'path': info['path'], 'in': info['in'], 'out': info['out'], 'meta': info['meta']})})
    return json.dumps(resp)
def index(self, task_id=None, forceurl=False):
    """Render the result page for one job, including Biorepo export links."""
    if task_id is None:
        return {'job_id': None}
    job = DBSession.query(Job).filter(Job.task_id == task_id).first()
    if job is None:
        return {
            'job_id': True,
            'haserror': True,
            'error':
            'Wrong job identifier, "%s" is not recognized as a valid job.' %
            task_id,
            'biorepodata': "{}",
            'biorepourl': ''
        }
    if job.task is None:
        return {
            'job_id': True,
            'haserror': True,
            'error': 'Task "%s" is PENDING.' % task_id,
            'biorepodata': "{}",
            'biorepourl': ''
        }
    req = job.request
    now = datetime.now()
    # result files are kept DAYS_LIMIT days after the task finished
    delta = timedelta(days=DAYS_LIMIT)
    deletion_date = job.task.date_done + delta
    jobdelta = now - job.task.date_done
    biorepodata = {}
    results = []
    for result in job.results:
        uri = ''
        if jobdelta > delta and not forceurl:
            # the file is past its retention period and already deleted
            d = jobdelta - delta
            mess = 'File "%s" was deleted %s days ago. Files are kept in Bioscript only %s days.' % (
                result.fname, d.days, DAYS_LIMIT)
            is_url = False
        else:
            d = delta - jobdelta
            uri = request.application_url + '/' + get_result_url(
                result, task_id)
            is_url = True
            mess = '<b>File will be deleted in %s days</b>. Files are kept in Bioscript only %s days.' % (
                d.days, DAYS_LIMIT)
        if biorepo.SERVICE_UP:
            # payload used by the Biorepo export link for this result
            dt = {
                'file_path': uri,
                'description': req.description(),
                'project_name': 'Analysis from Bioscript',
                'sample_name': req.plugin.info.get('title', '-'),
                'sample_type': 'BioScript analysis'
            }
            biorepodata['brepo_%s' % result.id] = dt
            mess += ' You could save it in <a id="brepo_%s" class="biorepourl">Biorepo</a>.' % result.id
        ## file_url : uri
        ## desc: req.parameters
        ## project_name: bioscript
        ## sample: plugin.info['title']
        results.append({
            'is_file': result.is_file,
            'result': result.result,
            'mess': mess,
            'uri': uri,
            'is_url': is_url,
            'fname': result.fname,
            'deletion-date': deletion_date,
        })
    # additionnal information
    trace = job.simple_error or ''
    complete = job.error or ''
    plug = req.plugin
    datedone = datetime.strftime(req.date_done, '%a %d %b %Y at %H:%M:%S')
    plugin_id = plug.id
    plugin_info = plug.info
    parameters = req.parameters
    # tracebacks are rendered as HTML
    trace = trace.replace('\n', '<br/>')
    complete = complete.replace('\n', '<br/>')
    return {
        'haserror': False,
        'status': job.status,
        'task_id': task_id,
        'job_id': job.id,
        'results': results,
        'traceback': trace,
        'full_traceback': complete,
        'date': datedone,
        'plugin_id': plugin_id,
        'plugin_info': plugin_info,
        'parameters': parameters,
        'plugin_generated_id': plug.generated_id,
        'biorepodata': json.dumps(biorepodata),
        'biorepourl':
        biorepo.SERVICE_UP and json.dumps(biorepo.BIOREPO_ACTION_URL) or ''
    }
def gettask(self, task_id):
    """Look up a single task by its identifier."""
    found = DBSession.query(Task).filter(Task.task_id == task_id).first()
    return {'task': found}
def index(self, task_id=None, forceurl=False):
    """Render the result page for one job, including Biorepo export links."""
    if task_id is None:
        return {'job_id': None}
    job = DBSession.query(Job).filter(Job.task_id == task_id).first()
    if job is None:
        return {'job_id': True, 'haserror': True, 'error': 'Wrong job identifier, "%s" is not recognized as a valid job.' % task_id, 'biorepodata': "{}", 'biorepourl': ''}
    if job.task is None:
        return {'job_id': True, 'haserror': True, 'error': 'Task "%s" is PENDING.' % task_id, 'biorepodata': "{}", 'biorepourl': ''}
    req = job.request
    now = datetime.now()
    # result files are kept DAYS_LIMIT days after the task finished
    delta = timedelta(days=DAYS_LIMIT)
    deletion_date = job.task.date_done + delta
    jobdelta = now - job.task.date_done
    biorepodata = {}
    results = []
    for result in job.results:
        uri = ''
        if jobdelta > delta and not forceurl:
            # the file is past its retention period and already deleted
            d = jobdelta - delta
            mess = 'File "%s" was deleted %s days ago. Files are kept in Bioscript only %s days.' % (result.fname, d.days, DAYS_LIMIT)
            is_url = False
        else:
            d = delta - jobdelta
            uri = request.application_url + '/' + get_result_url(result, task_id)
            is_url = True
            mess = '<b>File will be deleted in %s days</b>. Files are kept in Bioscript only %s days.' % (d.days, DAYS_LIMIT)
        if biorepo.SERVICE_UP:
            # payload used by the Biorepo export link for this result
            dt = {
                'file_path': uri,
                'description': req.description(),
                'project_name': 'Analysis from Bioscript',
                'sample_name': req.plugin.info.get('title', '-'),
                'sample_type': 'BioScript analysis'
            }
            biorepodata['brepo_%s' % result.id] = dt
            mess += ' You could save it in <a id="brepo_%s" class="biorepourl">Biorepo</a>.' % result.id
        ## file_url : uri
        ## desc: req.parameters
        ## project_name: bioscript
        ## sample: plugin.info['title']
        results.append({'is_file': result.is_file, 'result': result.result, 'mess': mess, 'uri': uri, 'is_url': is_url, 'fname': result.fname, 'deletion-date': deletion_date, })
    # additionnal information
    trace = job.simple_error or ''
    complete = job.error or ''
    plug = req.plugin
    datedone = datetime.strftime(req.date_done, '%a %d %b %Y at %H:%M:%S')
    plugin_id = plug.id
    plugin_info = plug.info
    parameters = req.parameters
    # tracebacks are rendered as HTML
    trace = trace.replace('\n', '<br/>')
    complete = complete.replace('\n', '<br/>')
    return {'haserror': False, 'status': job.status, 'task_id': task_id, 'job_id': job.id, 'results': results, 'traceback': trace, 'full_traceback': complete, 'date': datedone, 'plugin_id': plugin_id, 'plugin_info': plugin_info, 'parameters': parameters, 'plugin_generated_id': plug.generated_id, 'biorepodata': json.dumps(biorepodata), 'biorepourl': biorepo.SERVICE_UP and json.dumps(biorepo.BIOREPO_ACTION_URL) or ''}
def listtasks(self):
    """Return the ten most recently finished tasks."""
    from sqlalchemy.sql import expression
    recent = DBSession.query(Task).order_by(
        expression.desc(Task.date_done))[:10]
    return {'tasks': recent}