def models(): """ perform server-to-server communication to retrieve info.json file in all subfolders of model store """ if flask.request.args.get('refresh') == '1': app.config['store_cache'].reset() cached_data = app.config['store_cache'].read() if cached_data is not None: return json.dumps(cached_data) store_urls = app.config['store_url_list'] aggregated_dict = dict() for i, store_url in enumerate(store_urls): if len(store_url) == 0: continue model_list = list() if store_url[-1] != '/': store_base_url = store_url + '/' else: store_base_url = store_url try: response = requests.get(os.path.join(store_base_url, 'master.json')) if response.status_code == 200: json_response = json.loads(response.content) dirs = json_response['children'] msg = json_response['msg'] else: # try to retrieve from directory listing page = requests.get(store_base_url) parser = StoreParser() parser.feed(page.content) if len(parser.get_child_dirs()) > 0: # we have list of subdirectories dirs = [d[:-1] for d in parser.get_child_dirs()] msg = 'Thanks for visiting {}'.format(store_base_url) else: # nothing found, try next URL continue except requests.exceptions.RequestException as e: logger.warning('Skip %s due to error %s' % (store_base_url, e)) continue for subdir in dirs: tmp_dict = {'dir_name': subdir} response = requests.get(os.path.join(store_base_url, subdir, 'info.json')) if response.status_code == 200: tmp_dict['info'] = json.loads(response.content) tmp_dict['id'] = str(uuid.uuid4()) response = requests.get(os.path.join(store_base_url, subdir, 'aux.json')) if response.status_code == 200: tmp_dict['aux'] = json.loads(response.content) model_list.append(tmp_dict) store_info = {'base_url': store_base_url, 'welcome_msg': msg, 'model_list': model_list} aggregated_dict[store_base_url] = store_info app.config['store_cache'].write(aggregated_dict) return json.dumps(aggregated_dict)
def models(): """ perform server-to-server communication to retrieve info.json file in all subfolders of model store """ if flask.request.args.get('refresh') == '1': app.config['store_cache'].reset() cached_data = app.config['store_cache'].read() if cached_data is not None: return json.dumps(cached_data) store_urls = app.config['store_url_list'] aggregated_dict = dict() for i, store_url in enumerate(store_urls): if len(store_url) == 0: continue model_list = list() if store_url[-1] != '/': store_base_url = store_url + '/' else: store_base_url = store_url try: response = requests.get(os.path.join(store_base_url, 'master.json')) if response.status_code == 200: json_response = json.loads(response.content.decode()) dirs = json_response['children'] msg = json_response['msg'] else: # try to retrieve from directory listing page = requests.get(store_base_url) parser = StoreParser() parser.feed(page.content) if len(parser.get_child_dirs()) > 0: # we have list of subdirectories dirs = [d[:-1] for d in parser.get_child_dirs()] msg = 'Thanks for visiting {}'.format(store_base_url) else: # nothing found, try next URL continue except requests.exceptions.RequestException as e: logger.warning('Skip %s due to error %s' % (store_base_url, e)) continue for subdir in dirs: tmp_dict = {'dir_name': subdir} response = requests.get(os.path.join(store_base_url, subdir, 'info.json')) if response.status_code == 200: tmp_dict['info'] = json.loads(response.content.decode()) tmp_dict['id'] = str(uuid.uuid4()) response = requests.get(os.path.join(store_base_url, subdir, 'aux.json')) if response.status_code == 200: tmp_dict['aux'] = json.loads(response.content.decode()) model_list.append(tmp_dict) store_info = {'base_url': store_base_url, 'welcome_msg': msg, 'model_list': model_list} aggregated_dict[store_base_url] = store_info app.config['store_cache'].write(aggregated_dict) return json.dumps(aggregated_dict)
def group(): """ Assign the group for the listed jobs """ not_found = 0 forbidden = 0 group_name = utils.routing.get_request_arg('group_name').strip() job_ids = flask.request.form.getlist('job_ids[]') error = [] for job_id in job_ids: try: job = scheduler.get_job(job_id) if job is None: logger.warning('Job %s not found for group assignment.' % job_id) not_found += 1 continue if not utils.auth.has_permission(job, 'edit'): logger.warning('Group assignment not permitted for job %s' % job_id) forbidden += 1 continue job.group = group_name # update form data so updated name gets used when cloning job if hasattr(job, 'form_data'): job.form_data['form.group_name.data'] = job.group job.emit_attribute_changed('group', job.group) except Exception as e: error.append(e) pass for job_id in job_ids: job = scheduler.get_job(job_id) error = [] if not_found: error.append('%d job%s not found.' % (not_found, '' if not_found == 1 else 's')) if forbidden: error.append('%d job%s not permitted to be regrouped.' % (forbidden, '' if forbidden == 1 else 's')) if len(error) > 0: error = ' '.join(error) raise werkzeug.exceptions.BadRequest(error) return 'Jobs regrouped.'