def page_delete(page_id):
    """Delete the page with the given id; respond 404 when it does not exist."""
    try:
        Page.get_by_id(page_id).remove_from_db()
        return jsonify({"message": "Done"}), 200
    except NoSuchPageExistException:
        abort(404)
def test_aborter_general(test):
    """abort(*args) must raise exactly the expected exception type."""
    expected, *abort_args = test
    with pytest.raises(expected) as exc_info:
        exceptions.abort(*abort_args)
    assert type(exc_info.value) is expected
def on_authorize(self, request):
    """Return a registry JWT token when the requester is authorized.

    Parses an optional docker-registry style ``scope`` query parameter
    (``type:image:action,action``), requires HTTP basic-auth credentials,
    checks them against the requested scope, and returns a JSON body
    containing the generated token.

    :param request: incoming request exposing ``args`` and ``authorization``.
    :raises 401: when credentials are missing or access is denied.
    """
    scope = None
    if request.args.get('scope'):
        # Scope format is "type:image:actions" with comma-separated actions.
        type_, image, actions = request.args.get('scope').split(':')
        actions = actions.split(',')
        scope = Scope(type_, image, actions)
    if not request.authorization:
        abort(401)
    if not Auth().check_access(
        request.authorization.username,
        request.authorization.password,
        scope
    ):
        abort(401)
    # Token is issued for the account/service named in the query string.
    token = JWTToken(
        request.args['account'],
        request.args['service'],
        scope
    ).generate()
    res = {
        'token': token
    }
    return Response(json.dumps(res))
def get_quiz_view(quiz_id):
    """Render the quiz page only when the user has no existing quiz profile.

    The control flow is deliberately exception-driven: when a QuizProfile
    for (quiz_id, session e-mail) already exists the lookup succeeds and the
    request is aborted with 401 — presumably because the quiz was already
    taken (TODO confirm). Only when the lookup raises is the quiz rendered.
    """
    try:
        QuizProfile.get_by_composite_id(quiz_id, session['email'])
        # Lookup succeeded -> a profile already exists -> forbid access.
        # (abort raises HTTPException, which the except below does not catch.)
        abort(401)
    except NoSuchQuizProfileExistException as e:
        quiz = Quiz.get_by_id(quiz_id)
        return render_template('quiz/user/quiz.html', active_quiz=quiz.to_json())
def test_proxy_exception():
    """abort() with a Response object proxies that exact response."""
    original = Response('Hello World')
    with pytest.raises(exceptions.HTTPException) as caught:
        exceptions.abort(original)
    proxied = caught.value.get_response({})
    assert proxied is original
    assert proxied.get_data() == b'Hello World'
def photo(id, ext):
    """Redirect to the stored URL of material *id*; 404 when unknown."""
    material = MaterialService.get_by_id(id)
    if material is None:
        abort(404)
    # e.g. http://127.0.0.1:5000/static/uploads/photos/2012628/1340897676.jpg
    return redirect(material.url)
def get_post(id, check_author=True):
    """Get a post and its author by id.

    Checks that the id exists and optionally that the current user is
    the author.

    :param id: id of post to get
    :param check_author: require the current user to be the author
    :return: the post with author information
    :raise 404: if a post with the given id doesn't exist
    :raise 403: if the current user isn't the author
    """
    row = get_db().execute(
        'SELECT p.id, title, body, created, author_id, username'
        ' FROM post p JOIN user u ON p.author_id = u.id'
        ' WHERE p.id = ?',
        (id,),
    ).fetchone()

    if row is None:
        abort(404, "Post id {0} doesn't exist.".format(id))

    if check_author and row['author_id'] != g.user['id']:
        abort(403)

    return row
def get_object_or_404(model, *criterion):
    """Return the single row of *model* matching *criterion*, or abort 404.

    Both "no row" and "more than one row" are treated as not found.
    """
    try:
        return model.query.filter(*criterion).one()
    except (exc.NoResultFound, exc.MultipleResultsFound):
        abort(404)
def _initialize(self, request, *args):
    """Resolve which application instance this request targets, then set up DB access.

    Precedence for the instance name: an instance already fixed on ``self``,
    then the ``instance`` query argument, then the value remembered in the
    session. When none is found, the request is aborted with an instance
    selection page rather than a plain error status.
    """
    self._change_instance(request)
    # Session key is namespaced per configured app, e.g. "myapp.instance".
    app_instance = '%s.instance' % conf('config.app')
    if self.instance:
        request.session[app_instance] = self.instance
        instance_name = self.instance
    else:
        instance_name = request.args.get('instance', None)
        if not instance_name:
            instance_name = request.session.get(app_instance, None)
        if instance_name:
            # Remember the explicit choice for subsequent requests.
            request.session[app_instance] = instance_name
        else:
            # No instance determined: abort with a selection page
            # (abort() accepts a Response and raises it as an HTTPException).
            abort(Response(
                self._instance_selection(request),
                mimetype='text/html')
            )
    self.instance_name = instance_name
    self.instance_config = conf('crud.instances.%s.%s' % (
        conf('config.app'),
        self.instance_name
    ))
    self._get_database()
    self._get_session()
def wsgi_app(self, environ, start_response):
    """This is the actual WSGI application.

    This is not implemented in :meth:`__call__` so that middlewares can be
    applied without losing a reference to the class. So instead of doing
    this::

        app = MyMiddleware(app)

    It's a better idea to do this instead::

        app.wsgi_app = MyMiddleware(app.wsgi_app)

    Then you still have the original application object around and can
    continue to call methods on it. This idea comes from `Flask`_.

    :param environ: A WSGI environment.
    :param start_response: A callable accepting a status code, a list of
        headers and an optional exception context to start the response.

    Fix: the exception clauses used the Python-2-only ``except E, e``
    syntax; rewritten as ``except E as e`` (valid on 2.6+ and 3.x).
    """
    cleanup = True  # NOTE(review): unused in the visible code — presumably consumed later; confirm
    try:
        request = self.request_class(environ)
        # 501 Not Implemented for verbs the application does not support.
        if request.method not in self.allowed_methods:
            abort(501)
        match = self.router.match(request)
        response = self.router.dispatch(request, match)
    except Exception as e:
        # Give the application a chance to convert the failure into a
        # response; HTTPExceptions raised while doing so become responses
        # themselves.
        try:
            response = self.handle_exception(request, e)
        except HTTPException as e:
            response = self.make_response(request, e)
def paginate(self, page, per_page=20, error_out=True, sql=None):
    """Returns `per_page` items from page `page`.

    By default it will abort with 404 if no items were found and the page
    was larger than 1. This behavor can be disabled by setting `error_out`
    to `False`.

    Returns an :class:`Pagination` object.
    """
    sql = sql.lower()
    # Wrap the original query as a subselect to count the total rows.
    countsql = 'select count(*) as count from ({0}) tmp_count '.format(sql)  #+ sql[sql.find('from'):]
    #print(countsql)
    if 'limit' in sql:
        # NOTE(review): this only warns on stdout — the LIMIT clause below
        # is appended regardless, which would yield invalid SQL when the
        # caller's query already contains LIMIT; confirm intended behavior.
        print("please del your limit keywords")
    # MySQL-style "limit offset,count" pagination clause.
    sql = sql + ' limit {0},{1}'.format((page - 1) * per_page, per_page)
    #print(sql)
    items = self.db().query(sql)
    # NOTE(review): the page-number check runs *after* the query; a page < 1
    # still executes SQL (with a negative offset) before aborting.
    if error_out and page < 1:
        abort(404)
    # No need to count if we're on the first page and there are fewer
    # items than we expected.
    if page == 1 and len(items) < per_page:
        total = len(items)
    else:
        #total = self.order_by(None).count()
        total = self.db().get(countsql).count
    return Pagination(self, page, per_page, total, items)
def test_abort(code, name):
    """abort(code) raises the werkzeug class registered under *name*."""
    expected_cls = getattr(
        werkzeug_rfc7xx, werkzeug_rfc7xx._msg_to_class_name(name))
    with pytest.raises(expected_cls) as exc_info:
        abort(code)
    assert type(exc_info.value) is expected_cls
def delete(id):
    """Delete the entry with the given id and return a JSON result."""
    int_value_verify(id)
    old_entry = EntryService.get_by_id(id)
    if not old_entry:
        return jsonify(success=False, redirect_url='/', data_id=id)
    # Prevent deleting someone else's post (supervisors are exempt).
    if old_entry.author_id != g.user.id and not g.user.is_supervisor:
        abort(403)
    # Redirect target: explicit "next" arg, else the entry's category page.
    next = request.args.get('next', None)
    if next is None:
        next = url_for('portal.category', category=old_entry.category.slug)
    flash(_("The %(name)s has been deleted", name = old_entry.title), "success")
    return jsonify(success=EntryService.delete(old_entry), redirect_url=next, data_id=id)
def get_geo(): """ Get segments contained in database """ # Read JSON from request json = request.get_json() # Check bounding box in json request if (json is None) or not ('start_lat' in json and 'start_lon' in json and 'end_lat' in json and 'end_lon' in json): abort(400, 'bounding box missing') lat_min = min(json.get('start_lat'), json.get('end_lat')) lat_max = max(json.get('start_lat'), json.get('end_lat')) lon_min = min(json.get('start_lon'), json.get('end_lon')) lon_max = max(json.get('start_lon'), json.get('end_lon')) query = text('SELECT gid, osm_id, ST_AsText(ways.the_geom) ' 'FROM ways ' 'WHERE the_geom && ST_MakeEnvelope(:lon_min, :lat_min, :lon_max, :lat_max, 4326) ' 'LIMIT 100;') query_result = db.engine.execute(query, lon_min=lon_min, lat_min=lat_min, lon_max=lon_max, lat_max=lat_max) segments = [] for row in query_result: segments.append({ 'gid': row[0], 'osm_id': row[1], 'coordinates': coordinates_from_linestring(row[2]) }) return jsonify({'segment_count': query_result.rowcount, 'segments': segments})
def serve_slowly(path):
    """Stream a deliberately slow "99 bottles of beer" response.

    Each verse is surrounded by 64 KiB of zero padding intended to defeat
    intermediate buffering so the client sees data trickle in at RATE.

    Fix: the exception clause used the Python-2-only ``except Exception, e``
    syntax; rewritten as ``except Exception as e`` (valid on 2.6+ and 3.x).
    """
    def octoberfest():
        for bb in range(99, 2, -1):
            yield ("0" * 65535) + "\n"  # ENOUGH TO FILL THE INCOMING BUFFER
            # NOTE(review): ``Thread.sleep`` is not the stdlib spelling
            # (that would be time.sleep) — presumably a project helper; confirm.
            Thread.sleep(1.0 / RATE)
            yield CNV.unicode2utf8(
                expand_template(
                    "{{num}} bottles of beer on the wall! {{num}} bottles of beer! Take one down, pass it around! {{less}} bottles of beer on he wall!\n",
                    {"num": bb, "less": bb - 1},
                )
            )
        yield ("0" * 65535) + "\n"  # ENOUGH TO FILL THE INCOMING BUFFER
        yield CNV.unicode2utf8(
            u"2 bottles of beer on the wall! 2 bottles of beer! Take one down, pass it around! 1 bottle of beer on he wall!\n"
        )
        yield ("0" * 65535) + "\n"  # ENOUGH TO FILL THE INCOMING BUFFER
        yield CNV.unicode2utf8(
            u"1 bottle of beer on the wall! 1 bottle of beer! Take one down, pass it around! 0 bottles of beer on he wall.\n"
        )
    try:
        ## FORWARD RESPONSE
        return Response(octoberfest(), direct_passthrough=True, status=200)  # FOR STREAMING
    except Exception as e:
        abort(400)
def project():
    """Render the project page; 404 when the lookup finds nothing.

    The SQLAlchemy session is always released, whatever the outcome.
    """
    try:
        context = get_context()
        return render_template("project.html.jinja", **context)
    except NoResultFound:
        abort(404)
    finally:
        db.Session.remove()
def test_proxy_exception():
    """Proxy exceptions

    Fix: the except clause used the Python-2-only ``except E, e`` syntax;
    rewritten as ``except E as e`` (valid on 2.6+ and 3.x).
    NOTE(review): the test body makes no assertion on ``resp`` — presumably
    truncated or relying on abort() raising; confirm intent.
    """
    orig_resp = Response('Hello World')
    try:
        abort(orig_resp)
    except exceptions.HTTPException as e:
        resp = e.get_response({})
def photo(photo_id, ext):
    """Redirect to the stored URL of picture *photo_id*; 404 when unknown."""
    picture = PictureFile.get_by_id(photo_id)
    if picture is None:
        abort(404)
    # e.g. http://127.0.0.1:5000/static/uploads/photos/2012628/1340897676.jpg
    return redirect(picture.url)
def validate_db_response(db_response_par):
    """Validate a DB-layer response dict and return it unchanged on success.

    Aborts with the response's own status code (plus an assembled error
    message) when the code is not in SUCCESS_STATUS_CODES, and with 500
    when no 'status_code' key is present at all.
    """
    logger.debug("validate_db_response(db_response_par)")
    if 'status_code' not in db_response_par:
        abort(500, ERROR_DB_NO_STATUS_CODE)
    status = db_response_par['status_code']
    if status not in SUCCESS_STATUS_CODES:
        # Generate error message
        error_message = ERROR_DB_PREFIX
        if 'msg' in db_response_par:
            error_message += db_response_par['msg']
        if 'ErrorText' in db_response_par:
            error_message += ', ErrorText: ' + db_response_par['ErrorText'].replace('\"', '')
        # Abort with statusCode and errorMessage
        logger.debug('Abort with: ' + str(status) + ', ' + error_message)
        abort(int(status), str(error_message))
    logger.debug("validate_db_response(db_response_par): OK")
    return db_response_par
def get_knn_statics():
    """Accept a document upload and return per-author KNN probabilities as JSON."""
    if request.method != 'POST':
        abort(403)
    results = []
    prefix_path = '/var/tmp/stylometric_app/knn_upload/'
    f = request.files['file']
    # secure_filename guards against path traversal in the uploaded name.
    path = os.path.join(prefix_path, secure_filename(f.filename))
    f.save(path)
    if not os.path.exists(path):
        abort(403)
    qp = plaintext_data_etl.read_file_and_get_doc_list(path)
    # author_id -> author_name, used to label the results.
    author_hash = dict([(row['author_id'], row['author_name']) for row in data_warehouse.get_all_author_id_and_name()])
    try:
        # Restrict features to the authors previously chosen in the session.
        authors = session['authors']
        feature_list, author_list = data_warehouse.get_features_from_database_fact_by_list_of_author_id(authors)
    except KeyError:
        # No selection in the session -> use every author in the warehouse.
        feature_list, author_list = data_warehouse.get_all_features_from_database_fact()
    knn_proba = cknn.get_query_set_probabilistic(feature_list, author_list, qp)
    # NOTE(review): list(set(...)) has no guaranteed order, so pairing
    # authors[idx] with knn_proba[idx] assumes cknn orders its output the
    # same way — confirm against cknn's contract.
    authors = list(set(author_list))
    for idx in range(len(set(authors))):
        results.append((author_hash.get(authors[idx]), knn_proba[idx]))
    session.clear()
    return jsonify(dict(results))
def get_author_details():
    """Show details for one author, or an author-selection page.

    GET without parameters renders the selection list; GET with
    ``author_id``/``doc_num`` or POST with ``author_id`` renders the
    detail page. Non-integer ids abort with 403.
    """
    if request.method == 'GET':
        author_id = request.args.get('author_id')
        doc_num = request.args.get('doc_num')
        if author_id is None and doc_num is None:
            # Nothing selected yet: render the picker.
            return render_template('data_visualize/select_author.html',
                                   title='Select an author',
                                   content=u'Select an author in the following list and the system will display the '
                                           u'details of that author you selected',
                                   authors_list=data_warehouse.get_all_author_id_and_name()
                                   )
    if request.method == 'POST':
        try:
            author_id = int(request.form['author_id'])
            doc_num = data_warehouse.get_num_of_doc_written_by_an_author(author_id)
        except ValueError:
            abort(403)
    # Re-coerce (GET path delivers strings); reject non-numeric input.
    try:
        author_id = int(author_id)
        doc_num = int(doc_num)
    except ValueError:
        abort(403)
    author_name = data_warehouse.get_author_name_by_id(author_id)
    return render_template('data_visualize/author_details.html',
                           title=author_name,
                           content=Markup(u'You are now looking at <strong>{}</strong>. There are <strong>{}</strong> '
                                          u'of documents written by {} stored in our database.'
                                          .format(author_name, doc_num, author_name)),
                           doc_list=data_warehouse.get_all_docs_by_author_id(author_id)
                           )
def discount_codes_delete(discount_code_id=None):
    """Delete a discount code and redirect back to the listing; 404 if unknown."""
    code = InvoicingManager.get_discount_code(discount_code_id)
    if not code:
        abort(404)
    delete_from_db(code, "Discount code deleted")
    flash("The discount code has been deleted.", "warning")
    return redirect(url_for('.discount_codes_view'))
def load(client_id):
    """Load the Tenant for *client_id*; abort 400 when unknown."""
    record = db.get(client_id)
    if not record:
        _log.warn("Cannot find client: %s" % client_id)
        abort(400)
    return Tenant.from_map(record)
def add_recipe():
    """Create a recipe from the submitted form; requires a logged-in session."""
    if not session.get('logged_in'):
        abort(401)
    new_recipe = Recipe(request.form['title'], request.form['text'])
    db.add_recipe(g.db, new_recipe)
    flash('New entry was successfully added')
    return redirect(url_for('list_recipes'))
def update_recipe(id):
    """Update recipe *id* from the submitted form; requires a logged-in session."""
    if not session.get('logged_in'):
        abort(401)
    updated = Recipe(request.form['title'], request.form['description'], id)
    db.update_recipe(g.db, updated)
    return redirect(url_for('list_recipes'))
def load_from_web(self, form):
    """Populate this object from a submitted form.

    Expects ``name``, ``born_date`` (parsed via load_date) and ``kinship``
    keys; a missing key raises BadRequestKeyError, which is translated
    into a 403 abort.
    """
    try:
        self.name = form['name']
        self.born_date = load_date(form['born_date'])
        self.kinship = form['kinship']
    except BadRequestKeyError:
        abort(403)
def load_from_web(self, form, files):
    """Populate this employee record from a submitted form plus uploaded files.

    Every field is read from *form*; the helper converters (load_int,
    load_date, load_decimal, load_int_as_str, nwe) normalize empty or typed
    values. A missing required key raises BadRequestKeyError, which is
    mapped to a 403 abort. The photo is only replaced when a non-empty
    file was uploaded.
    """
    try:
        # Related entities are resolved from their submitted ids.
        self.department = Department()
        self.enterprise = Enterprise()
        self.role = Role()
        self.name = form['name']
        self.department.load(int(form['departament']))
        self.enterprise.load(int(form['enterprise']))
        self.role.load(int(form['role']))
        self.registry = load_int(form['registry'])
        self.name_tag = load_int(form['name_tag'])
        self.active = form['active'] == 'Y'
        # Personal documents / identification.
        self.cpf = load_int(form['cpf'])
        self.sex = nwe(form['sex'])
        self.rg = nwe(form['rg'])
        self.rg_issuing = nwe(form['rg_issuing'])
        self.rg_date = nwe(form['rg_date'])
        self.born_date = load_date(form['born_date'])
        self.cnh = nwe(form['cnh'])
        self.cnh_category = nwe(form['cnh_category'])
        self.ctps = nwe(form['ctps'])
        self.ctps_series = nwe(form['ctps_series'])
        self.ctps_fu = nwe(form['ctps_fu'])
        self.ctps_date = nwe(form['ctps_date'])
        self.nacionality = nwe(form['nacionality'])
        self.place_of_birth = nwe(form['place_of_birth'])
        # Contact / address.
        self.phone = load_int_as_str(form['phone'])
        self.cellphone = load_int_as_str(form['cellphone'])
        self.zipcode = load_int_as_str(form['zipcode'])
        self.address = nwe(form['address'])
        self.address_adjunct = nwe(form['address_adjunct'])
        self.neighborhood = nwe(form['neighborhood'])
        self.city = nwe(form['city'])
        self.fu = nwe(form['fu'])
        # Family / education.
        self.father_name = nwe(form['father_name'])
        self.mother_name = nwe(form['mother_name'])
        self.scholarity = load_int(form['scholarity'])
        self.scholarity_complete = form['scholarity_complete'] == 'Y'
        self.graduation = nwe(form['graduation'])
        self.post_graduation = nwe(form['post_graduation'])
        self.civil_state = nwe(form['civil_state'])
        self.spouse = nwe(form['spouse'])
        # Employment dates and payroll.
        self.admission_date = load_date(form['admission_date'])
        self.demission_date = load_date(form['demission_date'])
        self.pis_date = load_date(form['pis_date'])
        self.pis_number = load_int(form['pis_number'])
        self.meal_on_enterprise = form['meal_on_enterprise'] == 'Y'
        self.salary = load_decimal(form['salary'])
        self.reservist = nwe(form['reservist'])
        self.bank = nwe(form['bank'])
        self.agency = nwe(form['agency'])
        self.account = nwe(form['account'])
        self.winthor_registry = load_int(form['winthor_registry'])
        self.transport_voucher = form['transport_voucher'] == 'Y'
        # Replace the stored photo only when a non-empty upload was provided.
        data = files['photo'].stream.read()
        if data != b'':
            self.photo = data
    except BadRequestKeyError:
        abort(403)
def last_id():
    """Return the largest post id as a string; abort 404 when no posts exist.

    Bug fix: the original compared the *cursor* to None (``c == None``),
    which is never true — ``execute()`` always returns a cursor — so the
    404 branch was unreachable and an empty table yielded the string
    "None". Check the fetched value instead.
    """
    row = get_db().execute(
        "SELECT max(id) from post"
    ).fetchone()
    # max(id) over an empty table yields a single row containing NULL.
    if row is None or row[0] is None:
        abort(404, "No Posts exist")
    return str(row[0])
def __init__(self, row):
    """Build an entity wrapper from a database row.

    Aborts the request with HTTP 418 when *row* is falsy, i.e. the caller
    passed an invalid id.
    """
    if not row:
        logger.log('error', 'model', 'invalid id')
        abort(418)
    self.id = row.id
    self.nodes = dict()
    if hasattr(row, 'nodes') and row.nodes:
        for node in row.nodes:
            self.nodes[g.nodes[node['f1']]] = node['f2']  # f1 = node id, f2 = value
    self.name = row.name
    self.root = None
    self.description = row.description if row.description else ''
    self.system_type = row.system_type
    self.created = row.created
    self.modified = row.modified
    # first/last are optional integer bounds; only set when the row has them.
    self.first = int(row.first) if hasattr(row, 'first') and row.first else None
    self.last = int(row.last) if hasattr(row, 'last') and row.last else None
    self.class_ = g.classes[row.class_code]
    self.dates = {}
    self.view_name = None  # view_name is used to build urls
    if self.system_type == 'file':
        self.view_name = 'file'
    elif self.class_.code in app.config['CODE_CLASS']:
        self.view_name = app.config['CODE_CLASS'][self.class_.code]
    self.table_name = self.view_name  # table_name is used to build tables
    if self.view_name == 'place':
        # Places get a per-system-type table name, e.g. "stratigraphic-unit".
        self.table_name = self.system_type.replace(' ', '-')
def autor_livros_todos(params):
    """Return all books of the author identified by params['autor'] as JSON.

    Aborts with 500 when no author matches the given ObjectId.
    """
    autor = Autores.collection().find_one({'_id': ObjectId(params.get('autor'))})
    if autor:
        # NOTE(review): make_autor_from_dict is called with a fresh instance
        # as its first argument — presumably an unbound-method-style helper;
        # confirm against the Autores class.
        autor = Autores.make_autor_from_dict(Autores(), autor)
        return JSONEncoder().encode(autor.get_livros())
    else:
        abort(500)
def handle_exception(message, status_code=500):
    """Wrap *message* in a response with *status_code* and abort with it."""
    response = make_response(message, status_code)
    return exceptions.abort(response)
def add(map_identifier, topic_identifier):
    """Add a link occurrence to a topic (GET renders the form, POST creates it).

    Aborts 404 when the map or topic is missing, 403 when the current user
    does not own the topic map.
    """
    topic_store = get_topic_store()
    topic_map = topic_store.get_topic_map(map_identifier)
    if topic_map is None:
        abort(404)
    # Only the owner of the topic map may modify it.
    if current_user.id != topic_map.user_identifier:
        abort(403)
    topic = topic_store.get_topic(
        map_identifier,
        topic_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )
    if topic is None:
        abort(404)

    form_link_title = ""
    form_link_url = ""
    form_link_scope = "*"
    # error is a bitmask of validation failures: 1=title, 2=url, 4=scope.
    error = 0

    if request.method == "POST":
        form_link_title = request.form["link-title"].strip()
        form_link_url = request.form["link-url"].strip()
        form_link_scope = request.form["link-scope"].strip()

        # If no values have been provided set their default values
        if not form_link_scope:
            form_link_scope = "*"  # Universal scope

        # Validate form inputs
        if not form_link_title:
            error = error | 1
        if not form_link_url:
            error = error | 2
        if not topic_store.topic_exists(topic_map.identifier, form_link_scope):
            error = error | 4

        if error != 0:
            flash(
                "An error occurred when submitting the form. Please review the warnings and fix accordingly.",
                "warning",
            )
        else:
            link_occurrence = Occurrence(
                instance_of="url",
                topic_identifier=topic.identifier,
                scope=form_link_scope,
                resource_ref=form_link_url,
            )
            # The human-readable title lives in a separate attribute.
            title_attribute = Attribute(
                "title",
                form_link_title,
                link_occurrence.identifier,
                data_type=DataType.STRING,
            )

            # Persist objects to the topic store
            topic_store.set_occurrence(topic_map.identifier, link_occurrence)
            topic_store.set_attribute(topic_map.identifier, title_attribute)

            flash("Link successfully added.", "success")
            return redirect(
                url_for(
                    "link.index",
                    map_identifier=topic_map.identifier,
                    topic_identifier=topic.identifier,
                ))

    # GET, or POST with validation errors: (re-)render the form.
    return render_template(
        "link/add.html",
        error=error,
        topic_map=topic_map,
        topic=topic,
        link_title=form_link_title,
        link_url=form_link_url,
        link_scope=form_link_scope,
    )
def edit(map_identifier, topic_identifier, link_identifier):
    """Edit a link occurrence's title and scope (GET renders, POST updates).

    Aborts 404 when the map or topic is missing, 403 when the current user
    does not own the topic map.
    """
    topic_store = get_topic_store()
    topic_map = topic_store.get_topic_map(map_identifier)
    if topic_map is None:
        abort(404)
    # Only the owner of the topic map may modify it.
    if current_user.id != topic_map.user_identifier:
        abort(403)
    topic = topic_store.get_topic(
        map_identifier,
        topic_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )
    if topic is None:
        abort(404)

    link_occurrence = topic_store.get_occurrence(
        map_identifier,
        link_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )

    # Pre-fill the form with the occurrence's current values.
    form_link_title = link_occurrence.get_attribute_by_name("title").value
    form_link_scope = link_occurrence.scope

    # error is a bitmask of validation failures: 1=title, 2=scope.
    error = 0

    if request.method == "POST":
        form_link_title = request.form["link-title"].strip()
        form_link_scope = request.form["link-scope"].strip()

        # If no values have been provided set their default values
        if not form_link_scope:
            form_link_scope = "*"  # Universal scope

        # Validate form inputs
        if not form_link_title:
            error = error | 1
        if not topic_store.topic_exists(topic_map.identifier, form_link_scope):
            error = error | 2

        if error != 0:
            flash(
                "An error occurred when submitting the form. Please review the warnings and fix accordingly.",
                "warning",
            )
        else:
            # Update link's title if it has changed
            if link_occurrence.get_attribute_by_name(
                    "title").value != form_link_title:
                topic_store.update_attribute_value(
                    topic_map.identifier,
                    link_occurrence.get_attribute_by_name("title").identifier,
                    form_link_title,
                )

            # Update link's scope if it has changed
            if link_occurrence.scope != form_link_scope:
                topic_store.update_occurrence_scope(map_identifier,
                                                    link_occurrence.identifier,
                                                    form_link_scope)

            flash("Link successfully updated.", "success")
            return redirect(
                url_for(
                    "link.index",
                    map_identifier=topic_map.identifier,
                    topic_identifier=topic.identifier,
                ))

    # GET, or POST with validation errors: (re-)render the form.
    return render_template(
        "link/edit.html",
        error=error,
        topic_map=topic_map,
        topic=topic,
        link_identifier=link_occurrence.identifier,
        link_title=form_link_title,
        link_scope=form_link_scope,
    )
def data(request, pool, model):
    """Export records of *model* as a CSV attachment.

    Query arguments: ``d`` (JSON domain), ``c`` (JSON context), ``l``
    (language), ``s``/``p`` (page size / page number), ``o`` (order
    specs), ``f`` (field names), ``enc``/``dl``/``qc`` (CSV encoding,
    delimiter, quote char), ``h`` (include header), ``loc`` (locale
    formatting). Malformed arguments abort with 400; an unknown model
    aborts with 404.

    Bug fix: the second JSON except clause caught ``json.JSONDecoder``
    (the decoder *class*, not an exception), which would raise a
    TypeError at runtime instead of returning 400 — corrected to
    ``json.JSONDecodeError``.
    """
    User = pool.get('res.user')
    Lang = pool.get('ir.lang')
    try:
        Model = pool.get(model)
    except KeyError:
        abort(HTTPStatus.NOT_FOUND)
    transaction = Transaction()
    context = User(transaction.user).get_preferences(context_only=True)
    language = request.args.get('l')
    if language:
        context['language'] = language
    try:
        domain = json.loads(request.args.get('d', '[]'),
                            object_hook=JSONDecoder())
    except json.JSONDecodeError:
        abort(HTTPStatus.BAD_REQUEST)
    try:
        ctx = json.loads(request.args.get('c', '{}'),
                         object_hook=JSONDecoder())
    except json.JSONDecodeError:  # was "except json.JSONDecoder" — a class, not an exception
        abort(HTTPStatus.BAD_REQUEST)
    # Strip private context keys, keeping only the _datetime marker.
    for key in list(ctx.keys()):
        if key.startswith('_') and key != '_datetime':
            del ctx[key]
    context.update(ctx)
    limit = None
    offset = 0
    if 's' in request.args:
        try:
            limit = int(request.args.get('s'))
            if 'p' in request.args:
                offset = int(request.args.get('p')) * limit
        except ValueError:
            abort(HTTPStatus.BAD_REQUEST)
    if 'o' in request.args:
        # Each order spec is "field,direction"; a missing direction becomes ''.
        order = [(o.split(',', 1) + [''])[:2]
                 for o in request.args.getlist('o')]
    else:
        order = None
    fields_names = request.args.getlist('f')
    encoding = request.args.get('enc', 'UTF-8')
    delimiter = request.args.get('dl', ',')
    quotechar = request.args.get('qc', '"')
    try:
        header = bool(int(request.args.get('h', True)))
        locale_format = bool(int(request.args.get('loc', False)))
    except ValueError:
        abort(HTTPStatus.BAD_REQUEST)
    with transaction.set_context(**context):
        lang = Lang.get(transaction.language)

        def format_(row):
            # Render each cell per the requested locale conventions.
            for i, value in enumerate(row):
                if locale_format:
                    if isinstance(value, Number):
                        value = lang.format('%.12g', value)
                    elif isinstance(value, (dt.date, dt.datetime)):
                        value = lang.strftime(value)
                elif isinstance(value, bool):
                    value = int(value)
                row[i] = value
            return row

        try:
            # A domain that is a plain list of ids exports those records;
            # otherwise it is treated as a search domain.
            if domain and isinstance(domain[0], (int, float)):
                rows = Model.export_data(domain, fields_names)
            else:
                rows = Model.export_data_domain(
                    domain, fields_names,
                    limit=limit, offset=offset, order=order)
        except (ValueError, KeyError):
            abort(HTTPStatus.BAD_REQUEST)
        data = io.StringIO(newline='')
        writer = csv.writer(data, delimiter=delimiter, quotechar=quotechar)
        if header:
            writer.writerow(fields_names)
        for row in rows:
            writer.writerow(format_(row))
        data = data.getvalue().encode(encoding)
        filename = slugify(Model.__names__()['model']) + '.csv'
        filename = filename.encode('latin-1', 'ignore')
        response = Response(data, mimetype='text/csv; charset=' + encoding)
        response.headers.add('Content-Disposition', 'attachment',
                             filename=filename)
        response.headers.add('Content-Length', len(data))
        return response
def exit(model: Any, word: str) -> None:
    """Abort with 400 when *word* already exists and duplicates are disallowed.

    NOTE(review): the name shadows the ``exit`` builtin, and the env var
    WILL_HAVE_WORDS_REPETEAD looks misspelled ("REPEATED") — it must match
    what deployments actually set, so both are left untouched here.
    """
    query, options = SearchWords.seeker(SearchByWord(word))
    data = model.find_one(query, options)
    # Duplicates allowed only when WILL_HAVE_WORDS_REPETEAD parses to a non-zero int.
    if not bool(int(getenv('WILL_HAVE_WORDS_REPETEAD', 0))) and data:
        abort(400, f'Already exists word <{word}>, aborting process')
def info_misto(misto_id):
    """Render the detail page for one vaccination site; 404 when unknown."""
    misto = db.session.query(OckovaciMisto).filter(OckovaciMisto.id == misto_id).one_or_none()
    if misto is None:
        abort(404)

    # Registration-queue stats per age group and profession, joined with
    # reservations made in the last 7 days (parameters are bound, not interpolated).
    registrace_info = db.session.query("vekova_skupina", "povolani", "fronta_pocet", "pomer",
                                       "rezervace_nove", "rezervace_celkem").from_statement(text(
        """
        select t1.vekova_skupina, t1.povolani, fronta_pocet,
            round((rezervace_nove*100.0)/rezervace_celkem) pomer, rezervace_nove, rezervace_celkem
        from (
            select vekova_skupina, povolani, sum(pocet) fronta_pocet
            from ockovani_registrace
            where rezervace=False and ockovaci_misto_id=:misto_id and import_id=:import_id
            group by vekova_skupina, povolani) t1
        left join (
            select vekova_skupina, povolani,
                sum(case when rezervace=true then pocet else 0 end) as rezervace_nove,
                NULLIF(sum(pocet), 0) as rezervace_celkem
            from ockovani_registrace
            where datum>now()-'7 days'::interval and ockovaci_misto_id=:misto_id and import_id=:import_id
            group by vekova_skupina, povolani
        ) t2
        on (t1.vekova_skupina = t2.vekova_skupina and t1.povolani = t2.povolani)
        order by (t1.vekova_skupina, t1.povolani)
        """
    )).params(misto_id=misto_id) \
        .params(import_id=_last_import_id()) \
        .all()

    # Dose movements per manufacturer: received, issued, received from
    # elsewhere, administered, destroyed.
    ampule_info = db.session.query("vyrobce", "operace", "sum").from_statement(text(
        """
        select * from (
            select sum(pocet_davek ) as sum,vyrobce,\'Příjem\' as operace from ockovani_distribuce
                where ockovaci_misto_id=:misto_id and akce=\'Příjem\' group by vyrobce
            union
            (select coalesce(sum(pocet_davek ),0)as sum,vyrobce,\'Výdej\' as operace from ockovani_distribuce
                where ockovaci_misto_id=:misto_id and akce=\'Výdej\' group by vyrobce)
            union
            (select coalesce(sum(pocet_davek ),0)as sum,vyrobce,\'Příjem odjinud\' as operace from ockovani_distribuce
                where cilove_ockovaci_misto_id=:misto_id and akce=\'Výdej\' group by vyrobce)
            union
            (select sum(pouzite_davky), vyrobce, \'Očkováno\' as operace from ockovani_spotreba
                where ockovaci_misto_id=:misto_id group by vyrobce)
            union
            (select sum(znehodnocene_davky), vyrobce, \'Zničeno\' as operace from ockovani_spotreba
                where ockovaci_misto_id=:misto_id group by vyrobce)
        ) as tbl order by vyrobce, operace
        """
    )).params(misto_id=misto_id).all()

    total = _compute_vaccination_stats(ampule_info)

    # Source data for plotly graph
    registrace_overview = db.session.query(
        OckovaniRegistrace.datum,
        func.sum(OckovaniRegistrace.pocet).label("pocet_registrovanych")) \
        .filter(OckovaniRegistrace.import_id == _last_import_id()) \
        .filter(OckovaniRegistrace.ockovaci_misto_id == misto.id) \
        .filter(OckovaniRegistrace.datum.between(date.today() - timedelta(days=365), date.today())) \
        .group_by(OckovaniRegistrace.datum) \
        .order_by(OckovaniRegistrace.datum).all()

    registrace_overview_terminy = db.session.query(
        OckovaniRegistrace.datum_rezervace,
        func.sum(OckovaniRegistrace.pocet).label("pocet_terminu")) \
        .filter(OckovaniRegistrace.import_id == _last_import_id()) \
        .filter(OckovaniRegistrace.ockovaci_misto_id == misto.id) \
        .filter(OckovaniRegistrace.datum_rezervace > date.today() - timedelta(days=365)) \
        .group_by(OckovaniRegistrace.datum_rezervace) \
        .order_by(OckovaniRegistrace.datum_rezervace).all()

    # Compute boundary dates for rangeslider in time series chart
    dates = [i.datum for i in registrace_overview] + [j.datum_rezervace for j in registrace_overview_terminy]

    return render_template('misto.html', misto=misto, total=total, registrace_info=registrace_info,
                           last_update=_last_import_modified(), now=_now(),
                           registrace_overview=registrace_overview,
                           registrace_overview_terminy=registrace_overview_terminy,
                           end_date=max(dates), start_date=min(dates))
def abort(code=404):
    """Abort the current request with *code* (default 404).

    Bug fix: the original body was ``return abort(code)``, which — because
    this def shadows the imported ``abort`` at module level — called
    *itself* and recursed until RecursionError. Delegate explicitly to
    Flask's abort instead (imported locally under a distinct name so the
    shadowing cannot recur; the file already uses Flask's abort elsewhere —
    confirm it is the intended target rather than werkzeug's).
    """
    from flask import abort as _flask_abort
    return _flask_abort(code)
def check_read_access(demande: Demande | None):
    """Abort 404 for a missing demande; raise Forbidden without read access."""
    if not demande:
        abort(404)
    if has_read_access(demande):
        return
    raise Forbidden()
def delete(map_identifier, topic_identifier, image_identifier):
    """Delete an image occurrence (GET shows confirmation, POST deletes).

    Removes both the occurrence from the topic store and the image file
    from disk. Aborts 404 when the map or topic is missing, 403 when the
    current user does not own the topic map.
    """
    topic_store = get_topic_store()
    topic_map = topic_store.get_topic_map(map_identifier)
    if topic_map is None:
        abort(404)
    # Only the owner of the topic map may modify it.
    if current_user.id != topic_map.user_identifier:
        abort(403)
    topic = topic_store.get_topic(
        map_identifier,
        topic_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )
    if topic is None:
        abort(404)

    image_occurrence = topic_store.get_occurrence(
        map_identifier,
        image_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )

    # Values shown on the confirmation page.
    form_image_title = image_occurrence.get_attribute_by_name("title").value
    form_image_resource_ref = image_occurrence.resource_ref
    form_image_scope = image_occurrence.scope

    if request.method == "POST":
        # Delete image occurrence from topic store
        topic_store.delete_occurrence(map_identifier, image_occurrence.identifier)

        # Delete image from file system
        image_file_path = os.path.join(
            bp.root_path,
            RESOURCES_DIRECTORY,
            str(map_identifier),
            topic_identifier,
            image_occurrence.resource_ref,
        )
        if os.path.exists(image_file_path):
            os.remove(image_file_path)

        flash("Image successfully deleted.", "warning")
        return redirect(
            url_for(
                "image.index",
                map_identifier=topic_map.identifier,
                topic_identifier=topic.identifier,
            ))

    # GET: render the confirmation page.
    return render_template(
        "image/delete.html",
        topic_map=topic_map,
        topic=topic,
        image_identifier=image_occurrence.identifier,
        image_title=form_image_title,
        image_resource_ref=form_image_resource_ref,
        image_scope=form_image_scope,
    )
def entity_view(id_: int) -> Union[str, Response]:
    """Render the detail view for any entity (node, reference system, actor,
    event, place, file, reference, source, artifact).

    Builds a dict of ``Tab`` objects whose tables are filled according to
    the entity's class/view, then renders 'entity/view.html'. Nodes that
    are root types redirect to the node index instead of rendering here.
    """
    if id_ in g.nodes:  # Nodes have their own view
        entity = g.nodes[id_]
        if not entity.root:
            # Root nodes redirect to the matching tab on the node index.
            if entity.class_.name == 'administrative_unit':
                tab_hash = '#menu-tab-places_collapse-'
            elif entity.standard:
                tab_hash = '#menu-tab-standard_collapse-'
            elif entity.value_type:
                tab_hash = '#menu-tab-value_collapse-'
            else:
                tab_hash = '#menu-tab-custom_collapse-'
            return redirect(url_for('node_index') + tab_hash + str(id_))
    elif id_ in g.reference_systems:
        entity = g.reference_systems[id_]
    else:
        entity = Entity.get_by_id(id_, nodes=True, aliases=True)
        if not entity.class_.view:
            flash(_("This entity can't be viewed directly."), 'error')
            abort(400)
    event_links = None  # Needed for actor
    overlays = None  # Needed for place
    tabs = {'info': Tab('info')}
    if isinstance(entity, Node):
        tabs['subs'] = Tab('subs', entity)
        tabs['entities'] = Tab('entities', entity)
        root = g.nodes[entity.root[-1]] if entity.root else None
        if root and root.value_type:  # pragma: no cover
            tabs['entities'].table.header = [
                _('name'), _('value'), _('class'), _('info')
            ]
        # Entities typed/classified by this node (P2/P89 inverse links).
        for item in entity.get_linked_entities(['P2', 'P89'],
                                               inverse=True, nodes=True):
            if item.class_.name in ['location', 'reference_system']:
                continue  # pragma: no cover
            if item.class_.name == 'object_location':  # pragma: no cover
                item = item.get_linked_entity_safe('P53', inverse=True)
            data = [link(item)]
            if root and root.value_type:  # pragma: no cover
                data.append(format_number(item.nodes[entity]))
            data.append(item.class_.label)
            data.append(item.description)
            tabs['entities'].table.rows.append(data)
        for sub_id in entity.subs:
            sub = g.nodes[sub_id]
            tabs['subs'].table.rows.append(
                [link(sub), sub.count, sub.description])
        if not tabs[
                'entities'].table.rows:
            # If no entities available get links with this type_id
            tabs['entities'].table.header = [_('domain'), _('range')]
            for row in Link.get_entities_by_node(entity):
                tabs['entities'].table.rows.append([
                    link(Entity.get_by_id(row['domain_id'])),
                    link(Entity.get_by_id(row['range_id']))
                ])
    elif isinstance(entity, ReferenceSystem):
        # One tab per form linked to this reference system.
        for form_id, form in entity.get_forms().items():
            tabs[form['name']] = Tab(form['name'], origin=entity)
            tabs[form['name']].table = Table(
                [_('entity'), 'id', _('precision')])
        for link_ in entity.get_links('P67'):
            name = link_.description
            if entity.resolver_url:
                # Wrap the external identifier in a resolver hyperlink.
                name = \
                    '<a href="{url}" target="_blank" rel="noopener noreferrer">{name}</a>'.format(
                        url=entity.resolver_url + name, name=name)
            tab_name = link_.range.class_.name
            tabs[tab_name].table.rows.append(
                [link(link_.range), name, link_.type.name])
        for form_id, form in entity.get_forms().items():
            # Empty form tabs get a remove button for managers.
            if not tabs[form['name']].table.rows and is_authorized('manager'):
                tabs[form['name']].buttons = [
                    button(
                        _('remove'),
                        url_for('reference_system_remove_form',
                                system_id=entity.id, form_id=form_id))
                ]
    elif entity.class_.view == 'actor':
        for name in ['source', 'event', 'relation', 'member_of', 'member']:
            tabs[name] = Tab(name, entity)
        event_links = entity.get_links(['P11', 'P14', 'P22', 'P23', 'P25'],
                                       True)
        for link_ in event_links:
            event = link_.domain
            places = event.get_linked_entities(['P7', 'P26', 'P27'])
            link_.object_ = None
            for place in places:
                object_ = place.get_linked_entity_safe('P53', True)
                entity.linked_places.append(object_)
                link_.object_ = object_  # Needed later for first/last appearance info
            # Fall back to the event's dates, styled as "inactive".
            first = link_.first
            if not link_.first and event.first:
                first = '<span class="inactive">' + event.first + '</span>'
            last = link_.last
            if not link_.last and event.last:
                last = '<span class="inactive">' + event.last + '</span>'
            data = [
                link(event), event.class_.label, link(link_.type), first,
                last, link_.description
            ]
            data = add_edit_link(
                data,
                url_for('involvement_update', id_=link_.id,
                        origin_id=entity.id))
            data = add_remove_link(data, link_.domain.name, link_, entity,
                                   'event')
            tabs['event'].table.rows.append(data)
        # OA7 = "has relationship to"; collect both directions.
        for link_ in entity.get_links('OA7') + entity.get_links('OA7', True):
            type_ = ''
            if entity.id == link_.domain.id:
                related = link_.range
                if link_.type:
                    type_ = link(link_.type.get_name_directed(),
                                 url_for('entity_view', id_=link_.type.id))
            else:
                related = link_.domain
                if link_.type:
                    type_ = link(link_.type.get_name_directed(True),
                                 url_for('entity_view', id_=link_.type.id))
            data = [
                type_, link(related), link_.first, link_.last,
                link_.description
            ]
            data = add_edit_link(
                data,
                url_for('relation_update', id_=link_.id,
                        origin_id=entity.id))
            data = add_remove_link(data, related.name, link_, entity,
                                   'relation')
            tabs['relation'].table.rows.append(data)
        # P107 inverse: groups this actor is a member of.
        for link_ in entity.get_links('P107', True):
            data = [
                link(link_.domain), link(link_.type), link_.first,
                link_.last, link_.description
            ]
            data = add_edit_link(
                data,
                url_for('member_update', id_=link_.id, origin_id=entity.id))
            data = add_remove_link(data, link_.domain.name, link_, entity,
                                   'member-of')
            tabs['member_of'].table.rows.append(data)
        if entity.class_.name != 'group':
            del tabs['member']  # Only groups have members
        else:
            for link_ in entity.get_links('P107'):
                data = [
                    link(link_.range), link(link_.type), link_.first,
                    link_.last, link_.description
                ]
                data = add_edit_link(
                    data,
                    url_for('member_update', id_=link_.id,
                            origin_id=entity.id))
                data = add_remove_link(data, link_.range.name, link_,
                                       entity, 'member')
                tabs['member'].table.rows.append(data)
    elif entity.class_.view == 'artifact':
        tabs['source'] = Tab('source', entity)
    elif entity.class_.view == 'event':
        for name in ['subs', 'source', 'actor']:
            tabs[name] = Tab(name, entity)
        for sub_event in entity.get_linked_entities('P117', inverse=True,
                                                    nodes=True):
            tabs['subs'].table.rows.append(get_base_table_data(sub_event))
        tabs['actor'].table.header.insert(
            5, _('activity'))  # Add a table column for activity
        for link_ in entity.get_links(['P11', 'P14', 'P22', 'P23']):
            # Fall back to the event's own dates, styled as "inactive".
            first = link_.first
            if not link_.first and entity.first:
                first = '<span class="inactive">' + entity.first + '</span>'
            last = link_.last
            if not link_.last and entity.last:
                last = '<span class="inactive">' + entity.last + '</span>'
            data = [
                link(link_.range),
                link_.range.class_.label,
                link_.type.name if link_.type else '', first, last,
                g.properties[link_.property.code].name_inverse,
                link_.description
            ]
            data = add_edit_link(
                data,
                url_for('involvement_update', id_=link_.id,
                        origin_id=entity.id))
            data = add_remove_link(data, link_.range.name, link_, entity,
                                   'actor')
            tabs['actor'].table.rows.append(data)
        entity.linked_places = [
            location.get_linked_entity_safe('P53', True)
            for location in entity.get_linked_entities(['P7', 'P26', 'P27'])
        ]
    elif entity.class_.view == 'file':
        for name in [
                'source', 'event', 'actor', 'place', 'feature',
                'stratigraphic_unit', 'artifact', 'human_remains',
                'reference', 'type'
        ]:
            tabs[name] = Tab(name, entity)
        entity.image_id = entity.id if get_file_path(entity.id) else None
        # P67: entities this file documents.
        for link_ in entity.get_links('P67'):
            range_ = link_.range
            data = get_base_table_data(range_)
            data = add_remove_link(data, range_.name, link_, entity,
                                   range_.class_.name)
            tabs[range_.class_.view].table.rows.append(data)
        # P67 inverse: references citing this file.
        for link_ in entity.get_links('P67', True):
            data = get_base_table_data(link_.domain)
            data.append(link_.description)
            data = add_edit_link(
                data,
                url_for('reference_link_update', link_id=link_.id,
                        origin_id=entity.id))
            data = add_remove_link(data, link_.domain.name, link_, entity,
                                   'reference')
            tabs['reference'].table.rows.append(data)
    elif entity.class_.view == 'place':
        tabs['source'] = Tab('source', entity)
        tabs['event'] = Tab('event', entity)
        tabs['reference'] = Tab('reference', entity)
        # Sub-unit tabs depend on where we are in the place hierarchy.
        if entity.class_.name == 'place':
            tabs['actor'] = Tab('actor', entity)
            tabs['feature'] = Tab('feature', origin=entity)
        elif entity.class_.name == 'feature':
            tabs['stratigraphic_unit'] = Tab('stratigraphic_unit',
                                             origin=entity)
        elif entity.class_.name == 'stratigraphic_unit':
            tabs['find'] = Tab('find', origin=entity)
            tabs['human_remains'] = Tab('human_remains', origin=entity)
        entity.location = entity.get_linked_entity_safe('P53', nodes=True)
        event_ids = [
        ]  # Keep track of already inserted events to prevent doubles
        for event in entity.location.get_linked_entities(
                ['P7', 'P26', 'P27'], inverse=True):
            tabs['event'].table.rows.append(get_base_table_data(event))
            event_ids.append(event.id)
        for event in entity.get_linked_entities('P24', inverse=True):
            if event.id not in event_ids:  # Don't add again if already in table
                tabs['event'].table.rows.append(get_base_table_data(event))
        if 'actor' in tabs:
            # Actors residing at / born at / died at this location.
            for link_ in entity.location.get_links(['P74', 'OA8', 'OA9'],
                                                   inverse=True):
                actor = Entity.get_by_id(link_.domain.id)
                tabs['actor'].table.rows.append([
                    link(actor),
                    g.properties[link_.property.code].name,
                    actor.class_.name, actor.first, actor.last,
                    actor.description
                ])
    elif entity.class_.view == 'reference':
        for name in [
                'source', 'event', 'actor', 'place', 'feature',
                'stratigraphic_unit', 'human_remains', 'artifact', 'file'
        ]:
            tabs[name] = Tab(name, entity)
        for link_ in entity.get_links('P67'):
            range_ = link_.range
            data = get_base_table_data(range_)
            data.append(link_.description)
            data = add_edit_link(
                data,
                url_for('reference_link_update', link_id=link_.id,
                        origin_id=entity.id))
            data = add_remove_link(data, range_.name, link_, entity,
                                   range_.class_.name)
            tabs[range_.class_.view].table.rows.append(data)
    elif entity.class_.view == 'source':
        for name in [
                'actor', 'artifact', 'feature', 'event', 'human_remains',
                'place', 'stratigraphic_unit', 'text'
        ]:
            tabs[name] = Tab(name, entity)
        for text in entity.get_linked_entities('P73', nodes=True):
            tabs['text'].table.rows.append([
                link(text),
                next(iter(text.nodes)).name if text.nodes else '',
                text.description
            ])
        for link_ in entity.get_links('P67'):
            range_ = link_.range
            data = get_base_table_data(range_)
            data = add_remove_link(data, range_.name, link_, entity,
                                   range_.class_.name)
            tabs[range_.class_.view].table.rows.append(data)
    # Common reference/file tabs shared by several views.
    if entity.class_.view in [
            'actor', 'artifact', 'event', 'place', 'source', 'type'
    ]:
        if entity.class_.view != 'reference' and not isinstance(entity, Node):
            tabs['reference'] = Tab('reference', entity)
        if entity.class_.view == 'artifact':
            tabs['event'] = Tab('event', entity)
            for link_ in entity.get_links('P25', True):
                data = get_base_table_data(link_.domain)
                tabs['event'].table.rows.append(data)
        tabs['file'] = Tab('file', entity)
        entity.image_id = entity.get_profile_image_id()
        tabs['file'].table.header.append(uc_first(_('overlay')))
        for link_ in entity.get_links('P67', inverse=True):
            domain = link_.domain
            data = get_base_table_data(domain)
            if domain.class_.view == 'file':  # pragma: no cover
                extension = data[3]
                data.append(
                    get_profile_image_table_link(domain, entity, extension,
                                                 entity.image_id))
                if not entity.image_id and extension in app.config[
                        'DISPLAY_FILE_EXTENSIONS']:
                    entity.image_id = domain.id
                # Map overlay management, editors only.
                if entity.class_.view == 'place' and is_authorized('editor') and \
                        current_user.settings['module_map_overlay']:
                    overlays = Overlay.get_by_object(entity)
                    if extension in app.config['DISPLAY_FILE_EXTENSIONS']:
                        if domain.id in overlays:
                            data = add_edit_link(
                                data,
                                url_for('overlay_update',
                                        id_=overlays[domain.id].id))
                        else:
                            data.append(
                                link(
                                    _('link'),
                                    url_for('overlay_insert',
                                            image_id=domain.id,
                                            place_id=entity.id,
                                            link_id=link_.id)))
                    else:  # pragma: no cover
                        data.append('')
            if domain.class_.view not in ['source', 'file']:
                data.append(link_.description)
                data = add_edit_link(
                    data,
                    url_for('reference_link_update', link_id=link_.id,
                            origin_id=entity.id))
                if domain.class_.view == 'reference_system':
                    entity.reference_systems.append(link_)
                    continue  # Shown separately, not as a tab row
            data = add_remove_link(data, domain.name, link_, entity,
                                   domain.class_.view)
            tabs[domain.class_.view].table.rows.append(data)
    structure = None  # Needed for place
    gis_data = None  # Needed for place
    if entity.class_.view in ['artifact', 'place']:
        structure = get_structure(entity)
        if structure:
            for item in structure['subunits']:
                tabs[item.class_.name].table.rows.append(
                    get_base_table_data(item))
        gis_data = Gis.get_all([entity], structure)
        # Drop empty GIS payloads for root places without geometry.
        if gis_data['gisPointSelected'] == '[]' \
                and gis_data['gisPolygonSelected'] == '[]' \
                and gis_data['gisLineSelected'] == '[]' \
                and (not structure or not structure['super_id']):
            gis_data = {}
    if not gis_data:
        gis_data = Gis.get_all(
            entity.linked_places) if entity.linked_places else None
    entity.info_data = get_entity_data(entity, event_links=event_links)
    tabs['note'] = Tab('note', entity)
    for note in current_user.get_notes_by_entity_id(entity.id):
        data = [
            format_date(note['created']),
            uc_first(_('public'))
            if note['public'] else uc_first(_('private')),
            link(User.get_by_id(note['user_id'])),
            note['text'],
            '<a href="{url}">{label}</a>'.format(
                url=url_for('note_view', id_=note['id']),
                label=uc_first(_('view')))
        ]
        tabs['note'].table.rows.append(data)
    return render_template(
        'entity/view.html',
        entity=entity,
        tabs=tabs,
        buttons=add_buttons(entity),
        structure=structure,  # Needed for place views
        overlays=overlays,  # Needed for place views
        gis_data=gis_data,
        title=entity.name,
        crumbs=add_crumbs(entity, structure))
def ensure_db(redirect=URLS['login']):
    """ Used by client when a :meth:`http.route()<openerp.http.route>` has
    authentication method parameter as "none" (``auth='none'``) and if the
    route is dependent on a database.

    If no database is found, it will redirect to URL assigned to
    ``redirect`` parameter.

    If database name is from a query parameter, it will be checked by
    :meth:`http.db_filter()<openerp.http.db_filter>` thus to avoid database
    forgery that could lead to xss attacks.

    :param redirect: URL to redirect to
    :type redirect: str
    :returns: ``None``
    :rtype: NoneType
    """
    db = request.params.get('db')

    # Ensure "legitness" of database
    if db and db not in http.db_filter([db]):
        db = None

    if db and not request.session.db:
        # User asked a specific database on a new session.
        # That mean the nodb router has been used to find the route
        # Depending on installed module in the database,
        # the rendering of the page
        # may depend on data injected by the database route dispatcher.
        # Thus, we redirect the user to the same page but
        # with the session cookie set.
        # This will force using the database route dispatcher...
        r = request.httprequest
        url_redirect = r.base_url
        if r.query_string:
            # Can't use werkzeug.wrappers.BaseRequest.url with encoded hashes:
            # https://github.com/amigrave/werkzeug/commit/
            # b4a62433f2f7678c234cdcac6247a869f90a7eb7
            url_redirect += '?' + r.query_string
        # NOTE(review): the return value of this redirect call is discarded;
        # presumably abort_and_redirect() below performs the actual redirect
        # and this line is vestigial — confirm before removing.
        utils.redirect(url_redirect, 302)
        request.session.db = db
        abort_and_redirect(url_redirect)

    # if db not provided, use the session one
    if not db:
        db = request.session.db

    # if no database provided and no database in session, use monodb
    if not db:
        db = db_monodb(request.httprequest)

    # if no db can be found til here, send to the database selector
    # the database selector will redirect to database manager if needed
    if not db:
        exceptions.abort(utils.redirect(redirect, 303))

    # always switch the session to the computed db
    if db != request.session.db:
        request.session.logout()
        abort_and_redirect(request.httprequest.url)

    request.session.db = db
def upload(map_identifier, topic_identifier):
    """Upload an image and attach it to a topic as an 'image' occurrence.

    GET renders the upload form. POST validates the submitted title, scope
    and file, stores the file on disk under a UUID-based name, and persists
    an image occurrence plus its title attribute in the topic store.

    Validation failures are accumulated in the bit flags of ``error``:
    1 = missing title, 2 = missing file, 4 = empty filename,
    8 = disallowed file type, 16 = unknown scope topic.

    :raises 404: topic map or topic not found
    :raises 403: current user does not own the topic map
    """
    topic_store = get_topic_store()
    topic_map = topic_store.get_topic_map(map_identifier)
    if topic_map is None:
        abort(404)
    # Only the owner of the topic map may upload images.
    if current_user.id != topic_map.user_identifier:
        abort(403)
    topic = topic_store.get_topic(
        map_identifier,
        topic_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )
    if topic is None:
        abort(404)
    form_image_title = ""
    form_image_scope = "*"
    error = 0
    if request.method == "POST":
        form_image_title = request.form["image-title"].strip()
        form_image_scope = request.form["image-scope"].strip()
        form_upload_file = (request.files["image-file"]
                            if "image-file" in request.files else None)

        # If no values have been provided set their default values
        if not form_image_scope:
            form_image_scope = "*"  # Universal scope

        # Validate form inputs
        if not form_image_title:
            error = error | 1
        if not form_upload_file:
            error = error | 2
        else:
            if form_upload_file.filename == "":
                error = error | 4
            elif not allowed_file(form_upload_file.filename):
                error = error | 8
        if not topic_store.topic_exists(topic_map.identifier,
                                        form_image_scope):
            error = error | 16

        if error != 0:
            flash(
                "An error occurred when uploading the image. Please review the warnings and fix accordingly.",
                "warning",
            )
        else:
            # Random server-side name avoids collisions and path injection.
            image_file_name = (
                f"{str(uuid.uuid4())}.{get_file_extension(form_upload_file.filename)}"
            )
            # Create the image directory for this topic map and topic if it
            # doesn't already exist
            image_directory = os.path.join(bp.root_path,
                                           RESOURCES_DIRECTORY,
                                           str(map_identifier),
                                           topic_identifier)
            if not os.path.isdir(image_directory):
                os.makedirs(image_directory)
            file_path = os.path.join(image_directory, image_file_name)
            form_upload_file.save(file_path)
            image_occurrence = Occurrence(
                instance_of="image",
                topic_identifier=topic.identifier,
                scope=form_image_scope,
                resource_ref=image_file_name,
            )
            title_attribute = Attribute(
                "title",
                form_image_title,
                image_occurrence.identifier,
                data_type=DataType.STRING,
            )
            # Persist objects to the topic store
            topic_store.set_occurrence(topic_map.identifier,
                                       image_occurrence)
            topic_store.set_attribute(topic_map.identifier, title_attribute)
            flash("Image successfully uploaded.", "success")
            return redirect(
                url_for(
                    "image.index",
                    map_identifier=topic_map.identifier,
                    topic_identifier=topic.identifier,
                ))
    # GET, or POST with validation errors: (re)render the form.
    return render_template(
        "image/upload.html",
        error=error,
        topic_map=topic_map,
        topic=topic,
        image_title=form_image_title,
        image_scope=form_image_scope,
    )
def catch_all(path):
    """Serve the single-page-app shell for any path containing 'app';
    respond 404 for everything else."""
    is_app_route = 'app' in path
    if not is_app_route:
        return abort(404)
    return render_template('index.html')
def take_patient_observation(self, observation, patient_id, *args, **kw):
    """
    Renders the
    :class:`observation<observations.nh_clinical_patient_observation>`
    entry view.

    Fixes applied:
    * ``input_type is 'float'`` identity-compared a string literal
      (implementation-dependent, emits SyntaxWarning) — now ``==``.
    * the 'meta' branch indexed ``form_input['score']`` *before* checking
      ``'score' in form_input``, so a meta input without a score raised
      KeyError — now uses ``dict.get`` with the same False fallback.

    :returns: observations entry response object
    :rtype: :class:`http.Response<openerp.http.Response>`
    """
    cr, uid, context = request.cr, request.uid, request.context
    api_pool = request.registry('nh.eobs.api')
    follow_activities = api_pool.get_assigned_activities(
        cr, uid, activity_type='nh.clinical.patient.follow',
        context=context)
    patient = dict()
    patient_info = api_pool.get_patients(cr, uid, [int(patient_id)],
                                         context=context)
    if not patient_info:
        exceptions.abort(404)
    elif len(patient_info) > 0:
        patient_info = patient_info[0]
    patient['url'] = URLS['single_patient'] + '{0}'.format(
        patient_info['id'])
    patient['name'] = patient_info['full_name']
    patient['id'] = patient_info['id']
    # Build the form skeleton rendered by the observation entry template.
    form = dict()
    form['action'] = URLS['patient_form_action'] + '{0}/{1}'.format(
        observation, patient_id)
    form['type'] = observation
    form['task-id'] = False
    form['patient-id'] = int(patient_id)
    form['source'] = "patient"
    form['start'] = datetime.now().strftime('%s')
    form['obs_needs_score'] = False
    form_desc = api_pool.get_form_description(
        cr, uid, int(patient_id),
        'nh.clinical.patient.observation.{0}'.format(observation),
        context=context)
    # Normalise each described input into what the template expects.
    for form_input in form_desc:
        if form_input['type'] in ['float', 'integer']:
            input_type = form_input['type']
            # Bug fix: was "input_type is 'float'" (identity check).
            form_input['step'] = 0.1 if input_type == 'float' else 1
            form_input['type'] = 'number'
            form_input['number'] = True
            form_input['info'] = ''
            form_input['errors'] = ''
            form_input['min'] = str(form_input['min'])
        elif form_input['type'] == 'selection':
            form_input['selection_options'] = []
            form_input['info'] = ''
            form_input['errors'] = ''
            for option in form_input['selection']:
                opt = dict()
                opt['value'] = '{0}'.format(option[0])
                opt['label'] = option[1]
                form_input['selection_options'].append(opt)
        elif form_input['type'] == 'meta':
            # Bug fix: guard the 'score' lookup instead of indexing first.
            form['obs_needs_score'] = form_input.get('score', False)
    # Resolve the human-readable observation name; unknown types are 404.
    observation_name_list = []
    for ob in api_pool._active_observations:
        if ob['type'] == observation:
            observation_name_list.append(ob['name'])
    if len(observation_name_list) == 0:
        exceptions.abort(404)
    return request.render('nh_eobs_mobile.observation_entry',
                          qcontext={
                              'inputs': form_desc,
                              'name': observation_name_list[0],
                              'patient': patient,
                              'form': form,
                              'section': 'patient',
                              'notification_count': len(follow_activities),
                              'username': request.session['login'],
                              'urls': URLS
                          })
def create(map_identifier, topic_identifier):
    """Create an association from the current topic to another topic.

    GET renders the creation form pre-populated with defaults. POST
    validates the submitted refs against the topic store and, on success,
    persists the new association and redirects to the association index.

    Validation failures accumulate in the bit flags of ``error``:
    1 = unknown destination topic, 2 = unknown destination role spec,
    4 = unknown source role spec, 8 = unknown association type,
    16 = unknown scope topic, 32 = identifier already taken.

    :raises 404: topic map or topic not found
    :raises 403: current user does not own the topic map
    """
    topic_store = get_topic_store()
    topic_map = topic_store.get_topic_map(map_identifier)
    if topic_map is None:
        abort(404)
    # Only the owner of the topic map may create associations.
    if current_user.id != topic_map.user_identifier:
        abort(403)
    topic = topic_store.get_topic(
        map_identifier,
        topic_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )
    if topic is None:
        abort(404)

    # Form defaults shown on GET.
    form_association_instance_of = "association"
    form_association_src_topic_ref = (
        topic_identifier  # The current topic is the 'source' topic
    )
    form_association_src_role_spec = "related"
    form_association_dest_topic_ref = ""
    form_association_dest_role_spec = "related"
    form_association_scope = session["current_scope"]
    form_association_name = ""
    form_association_identifier = ""
    error = 0
    if request.method == "POST":
        form_association_dest_topic_ref = request.form[
            "association-dest-topic-ref"].strip()
        form_association_dest_role_spec = request.form[
            "association-dest-role-spec"].strip()
        form_association_src_topic_ref = topic_identifier
        form_association_src_role_spec = request.form[
            "association-src-role-spec"].strip()
        form_association_instance_of = request.form[
            "association-instance-of"].strip()
        form_association_scope = request.form["association-scope"].strip()
        form_association_name = request.form["association-name"].strip()
        form_association_identifier = request.form[
            "association-identifier"].strip()

        # If no values have been provided set their default values
        if not form_association_dest_role_spec:
            form_association_dest_role_spec = "related"
        if not form_association_src_role_spec:
            form_association_src_role_spec = "related"
        if not form_association_instance_of:
            form_association_instance_of = "association"
        if not form_association_scope:
            form_association_scope = "*"  # Universal scope
        if not form_association_name:
            form_association_name = "Undefined"

        # Validate form inputs
        if not topic_store.topic_exists(topic_map.identifier,
                                        form_association_dest_topic_ref):
            error = error | 1
        if (form_association_dest_role_spec != "related"
                and not topic_store.topic_exists(
                    topic_map.identifier, form_association_dest_role_spec)):
            error = error | 2
        if (form_association_src_role_spec != "related"
                and not topic_store.topic_exists(
                    topic_map.identifier, form_association_src_role_spec)):
            error = error | 4
        if (form_association_instance_of != "association"
                and not topic_store.topic_exists(
                    topic_map.identifier, form_association_instance_of)):
            error = error | 8
        if form_association_scope != "*" and not topic_store.topic_exists(
                topic_map.identifier, form_association_scope):
            error = error | 16
        if form_association_identifier and topic_store.topic_exists(
                topic_map.identifier, form_association_identifier):
            error = error | 32

        # If role identifier topics are missing then create them
        # TODO: both branches below are unimplemented placeholders, so
        # errors 2 and 4 still fall through to the flash message.
        if error & 2:  # Destination role spec
            pass
        if error & 4:  # Source role spec
            pass

        if error != 0:
            flash(
                "An error occurred when submitting the form. Please review the warnings and fix accordingly.",
                "warning",
            )
        else:
            association = Association(
                identifier=form_association_identifier,
                instance_of=form_association_instance_of,
                name=form_association_name,
                scope=form_association_scope,
                src_topic_ref=form_association_src_topic_ref,
                dest_topic_ref=form_association_dest_topic_ref,
                src_role_spec=form_association_src_role_spec,
                dest_role_spec=form_association_dest_role_spec,
            )
            # Persist association object to the topic store
            topic_store.set_association(map_identifier, association)
            flash("Association successfully created.", "success")
            return redirect(
                url_for(
                    "association.index",
                    map_identifier=topic_map.identifier,
                    topic_identifier=topic_identifier,
                ))
    # GET, or POST with validation errors: (re)render the form.
    return render_template(
        "association/create.html",
        error=error,
        topic_map=topic_map,
        topic=topic,
        association_instance_of=form_association_instance_of,
        association_src_topic_ref=form_association_src_topic_ref,
        association_src_role_spec=form_association_src_role_spec,
        association_dest_topic_ref=form_association_dest_topic_ref,
        association_dest_role_spec=form_association_dest_role_spec,
        association_scope=form_association_scope,
        association_name=form_association_name,
        association_identifier=form_association_identifier,
    )
def get_object_or_404(model, *criterion):
    """Return the single row of *model* matching *criterion*, or abort 404.

    Fixes applied:
    * ``except exc.NoResultFound, exc.MultipleResultsFound:`` is Python-2
      syntax (a SyntaxError on Python 3) — now a parenthesized tuple.
    * the fetched row ``rv`` was assigned but never returned — now
      returned on the success path.

    :param model: SQLAlchemy model class with a ``query`` attribute
    :param criterion: filter criteria passed to ``query.filter``
    :raises 404: when no row, or more than one row, matches
    """
    try:
        rv = model.query.filter(*criterion).one()
    except (exc.NoResultFound, exc.MultipleResultsFound):
        abort(404)
    return rv
def get_entree(id):
    """Fetch the Entree with the given id, aborting with 404 when the
    lookup yields ``None``."""
    entree = Entree.objects().get(id=id)
    if entree is not None:
        return entree
    abort(404, "Entree id {0} doesn't exist.".format(id))
def index():
    """Stub endpoint — always answers 501 Not Implemented.

    TODO: provide an appropriate output.
    """
    return abort(501)
def wrap(*args, **kwargs):
    """Invoke the wrapped view only when the session is flagged as admin;
    otherwise abort with 403.

    Bug fix: ``session['admin']`` raised ``KeyError`` for sessions that
    never set the flag (e.g. anonymous users); ``session.get`` treats a
    missing key as not-authorized instead.
    """
    if session.get('admin') == True:  # noqa: E712 -- keep the strict True check
        return f(*args, **kwargs)
    abort(403, 'Unauthorized activity')
def add_trees():
    """Admin view: import a gzipped tar archive of phylogenetic trees.

    POST creates a new ``TreeMethod``, reads the SequenceIDs.txt upload to
    build an id-conversion table, links each tree file to its gene family
    by the filename prefix, and bulk-inserts trees in batches of ~400.
    Non-POST requests only validate the form and answer 405.
    """
    form = AddTreesForm(request.form)

    if request.method == 'POST':
        # First Add Method
        new_method = TreeMethod()
        new_method.description = request.form.get('description')
        new_method.gene_family_method_id = request.form.get(
            'gene_family_method_id')

        db.session.add(new_method)

        try:
            # Commit to DB remainder
            db.session.commit()
        except Exception as e:
            # NOTE(review): the caught exception `e` is never logged —
            # consider logging it; the rollback + flash is the visible effect.
            db.session.rollback()
            flash('Failed to add TreeMethod to the DB!', 'danger')
            return redirect(url_for('admin.index'))

        # Build conversion table from SequenceIDs.txt
        sequence_ids_data = request.files[
            form.sequence_ids.name].read().decode('utf-8')
        id_conversion = __read_sequence_ids(sequence_ids_data.split('\n'))

        # Get original gene family names (used to link trees to families)
        gfs = GeneFamily.query.filter(
            GeneFamily.method_id == new_method.gene_family_method_id).all()
        ori_name_to_id = {gf.original_name: gf.id for gf in gfs}

        tree_data = request.files[form.tree_archive.name].read()

        # NOTE(review): the fd returned by mkstemp() is never closed —
        # presumably a descriptor leak; confirm and close it.
        fd, temp_path = mkstemp()
        with open(temp_path, 'wb') as tree_data_writer:
            tree_data_writer.write(tree_data)

        new_trees = []
        with tarfile.open(temp_path, mode='r:gz') as tf:
            for name, entry in zip(tf.getnames(), tf):
                # Trees are stored as single-line newick strings.
                tree_string = str(
                    tf.extractfile(entry).read().decode('utf-8')).replace(
                        '\r', '').replace('\n', '')
                # get the gene families original name from the filename
                original_name = str(name.split('_')[0])
                gf_id = None
                if original_name in ori_name_to_id.keys():
                    gf_id = ori_name_to_id[original_name]
                else:
                    print(
                        '%s: Family %s not found in gene families generated using method %d !'
                        % (name, original_name,
                           new_method.gene_family_method_id))

                new_trees.append({
                    "gf_id": gf_id,
                    "label": original_name + "_tree",
                    "method_id": new_method.id,
                    "data_newick": __replace_ids(tree_string, id_conversion),
                    "data_phyloxml": None
                })

                # add 400 trees at the time, more can cause problems with
                # some database engines
                if len(new_trees) > 400:
                    db.engine.execute(Tree.__table__.insert(), new_trees)
                    new_trees = []

        # add the last set of trees
        db.engine.execute(Tree.__table__.insert(), new_trees)

        flash('Added trees to DB.', 'success')
        return redirect(url_for('admin.index'))
    else:
        if not form.validate():
            flash('Unable to validate data, potentially missing fields',
                  'danger')
            return redirect(url_for('admin.index'))
        else:
            abort(405)
def _dispatch(request, pool, *args, **kwargs):
    """Dispatch an RPC request to the target model method inside a Tryton
    transaction, with retry on database operational errors.

    Resolves the object/method from the request, enforces that the method
    is registered in ``__rpc__`` (403 otherwise) and that fresh-session
    methods have a non-timed-out session (401 otherwise), then executes it
    under ``Transaction().start`` with configurable retries. Also carries
    local patches: perf-analyzer hooks (AKE), session/token propagation
    into the transaction context (AKE), and slow-RPC logging (JCA).
    """
    # AKE: perf analyzer hooks
    try:
        PerfLog().on_enter()
    except Exception:
        perf_logger.exception('on_enter failed')
    DatabaseOperationalError = backend.get('DatabaseOperationalError')
    obj, method = get_object_method(request, pool)
    # Only methods explicitly exposed via __rpc__ may be called remotely.
    if method in obj.__rpc__:
        rpc = obj.__rpc__[method]
    else:
        abort(HTTPStatus.FORBIDDEN)
    user = request.user_id
    session = None
    if request.authorization.type == 'session':
        session = request.authorization.get('session')
    if rpc.fresh_session and session:
        context = {'_request': request.context}
        if not security.check_timeout(
                pool.database_name, user, session, context=context):
            abort(http.client.UNAUTHORIZED)
    log_message = '%s.%s(*%s, **%s) from %s@%s/%s'
    username = request.authorization.username
    if isinstance(username, bytes):
        username = username.decode('utf-8')
    log_args = (obj, method, args, kwargs, username, request.remote_addr,
                request.path)
    logger.info(log_message, *log_args)
    # JCA: log slow RPC
    if slow_threshold >= 0:
        slow_msg = '%s.%s (%s s)'
        slow_args = (obj, method)
        slow_start = time.time()
    user = request.user_id
    # AKE: add session to transaction context
    token, session = None, None
    if request.authorization.type == 'session':
        session = request.authorization.get('session')
    elif request.authorization.type == 'token':
        token = {
            'key': request.authorization.get('token'),
            'user': user,
            'party': request.authorization.get('party_id'),
        }
    # AKE: perf analyzer hooks
    try:
        PerfLog().on_execute(user, session, request.rpc_method, args,
                             kwargs)
    except Exception:
        perf_logger.exception('on_execute failed')
    # Retry loop: count runs down to 0; retries only for operational
    # errors on non-readonly calls.
    for count in range(config.getint('database', 'retry'), -1, -1):
        with Transaction().start(pool.database_name, user,
                                 readonly=rpc.readonly) as transaction:
            try:
                c_args, c_kwargs, transaction.context, transaction.timestamp \
                    = rpc.convert(obj, *args, **kwargs)
                # AKE: add session to transaction context
                transaction.context.update({
                    'session': session,
                    'token': token,
                })
                transaction.context['_request'] = request.context
                meth = getattr(obj, method)
                # AKE: perf analyzer hooks
                try:
                    wrapped_meth = profile(meth)
                except Exception:
                    perf_logger.exception('profile failed')
                else:
                    meth = wrapped_meth
                if (rpc.instantiate is None
                        or not is_instance_method(obj, method)):
                    result = rpc.result(meth(*c_args, **c_kwargs))
                else:
                    assert rpc.instantiate == 0
                    inst = c_args.pop(0)
                    if hasattr(inst, method):
                        result = rpc.result(meth(inst, *c_args, **c_kwargs))
                    else:
                        # Instance-less call: map over the record list.
                        result = [
                            rpc.result(meth(i, *c_args, **c_kwargs))
                            for i in inst
                        ]
            except DatabaseOperationalError:
                # Transient DB error: roll back and retry while attempts
                # remain (writes only); otherwise log and propagate.
                if count and not rpc.readonly:
                    transaction.rollback()
                    continue
                logger.error(log_message, *log_args, exc_info=True)
                # JCA: log slow RPC
                if slow_threshold >= 0:
                    slow_args += (str(time.time() - slow_start), )
                    log_exception(slow_logger.error, slow_msg, *slow_args)
                raise
            except (ConcurrencyException, UserError, UserWarning,
                    LoginException):
                # Expected business-level errors: log at debug only.
                logger.debug(log_message, *log_args, exc_info=True)
                # JCA: log slow RPC
                if slow_threshold >= 0:
                    slow_args += (str(time.time() - slow_start), )
                    log_exception(slow_logger.debug, slow_msg, *slow_args)
                raise
            except Exception:
                logger.error(log_message, *log_args, exc_info=True)
                # JCA: log slow RPC
                if slow_threshold >= 0:
                    slow_args += (str(time.time() - slow_start), )
                    log_exception(slow_logger.error, slow_msg, *slow_args)
                raise
            # Need to commit to unlock SQLite database
            transaction.commit()
            if request.authorization.type == 'session':
                # AKE: moved all session ops to security script
                security.reset_user_session(
                    pool.database_name, user,
                    request.authorization.get('session'))
        # Post-transaction work queued during the call.
        while transaction.tasks:
            task_id = transaction.tasks.pop()
            run_task(pool, task_id)
        if session:
            context = {'_request': request.context}
            security.reset(pool.database_name, session, context=context)
        logger.debug('Result: %s', result)
        # JCA: log slow RPC
        if slow_threshold >= 0:
            slow_diff = time.time() - slow_start
            slow_args += (str(slow_diff), )
            if slow_diff > slow_threshold:
                slow_logger.info(slow_msg, *slow_args)
            else:
                slow_logger.debug(slow_msg, *slow_args)
        # AKE: perf analyzer hooks
        try:
            PerfLog().on_leave(result)
        except Exception:
            perf_logger.exception('on_leave failed')
        return result
def sales_by_events_view(path):
    """Render the super-admin sales report, grouped per *path*.

    :param path: one of ``'events'``, ``'promoted-events'``, ``'organizers'``
        or ``'locations'``; selects both the order filter (promoted vs. all)
        and which template/summary is rendered. Any other value → 404.

    Query args ``from_date``/``to_date`` (``%d/%m/%Y``) optionally restrict
    the order range; if either is present-but-empty or only one is given,
    the request is redirected back to the same view without filters.
    """
    from_date = request.args.get('from_date')
    to_date = request.args.get('to_date')
    # Reject half-specified or empty date filters by redirecting to the
    # unfiltered version of this page.
    if ('from_date' in request.args and not from_date) or ('to_date' in request.args and not to_date) or \
        ('from_date' in request.args and 'to_date' not in request.args) or \
        ('to_date' in request.args and 'from_date' not in request.args):
        return redirect(url_for('.sales_by_events_view', path=path))
    promoted_events = path == 'promoted-events'
    if from_date and to_date:
        orders = TicketingManager.get_orders(
            from_date=datetime.strptime(from_date, '%d/%m/%Y'),
            to_date=datetime.strptime(to_date, '%d/%m/%Y'),
            promoted_event=promoted_events
        )
    else:
        orders = TicketingManager.get_orders(promoted_event=promoted_events)
    if promoted_events:
        events = DataGetter.get_all_events_with_discounts()
    else:
        events = DataGetter.get_all_events()
    # Global per-status totals; 'class' is the Bootstrap CSS class used by
    # the templates.
    orders_summary = {
        'completed': {
            'class': 'success',
            'tickets_count': 0,
            'orders_count': 0,
            'total_sales': 0
        },
        'pending': {
            'class': 'warning',
            'tickets_count': 0,
            'orders_count': 0,
            'total_sales': 0
        },
        'expired': {
            'class': 'danger',
            'tickets_count': 0,
            'orders_count': 0,
            'total_sales': 0
        }
    }
    # Per-dimension breakdowns keyed by event id / creator id / location name.
    tickets_summary_event_wise = {}
    tickets_summary_organizer_wise = {}
    tickets_summary_location_wise = {}
    for event in events:
        tickets_summary_event_wise[str(event.id)] = {
            'name': event.name,
            'payment_currency': event.payment_currency,
            'marketer': '',
            'discount_code': '',
            'completed': {
                'tickets_count': 0,
                'sales': 0
            },
            'pending': {
                'tickets_count': 0,
                'sales': 0
            },
            'expired': {
                'class': 'danger',
                'tickets_count': 0,
                'sales': 0
            }
        }
        if promoted_events:
            # Promoted events carry a discount code; surface the marketer
            # and a human-readable description of the discount.
            tickets_summary_event_wise[str(event.id)]['marketer'] = \
                event.discount_code.marketer.email
            tickets_summary_event_wise[str(event.id)]['discount_code'] = \
                str(event.discount_code.value) + '% off for ' + str(event.discount_code.max_quantity) + ' months'
        # Organizer and location tables start as deep copies of the event
        # entry so the nested counters are independent.
        tickets_summary_organizer_wise[str(event.creator_id)] = \
            copy.deepcopy(tickets_summary_event_wise[str(event.id)])
        if event.creator:
            tickets_summary_organizer_wise[str(event.creator_id)]['name'] = event.creator.email
        # NOTE(review): keys are built with unicode(...) here but read back
        # with str(...) in the order loop below — these only compare equal
        # for ASCII location names; verify against real data.
        tickets_summary_location_wise[unicode(event.searchable_location_name)] = \
            copy.deepcopy(tickets_summary_event_wise[str(event.id)])
        tickets_summary_location_wise[unicode(event.searchable_location_name)]['name'] = \
            event.searchable_location_name
    for order in orders:
        # 'initialized' orders are reported under 'pending'.
        if order.status == 'initialized':
            order.status = 'pending'
        orders_summary[str(order.status)]['orders_count'] += 1
        # Convert each order's amount into the display currency.
        orders_summary[str(order.status)]['total_sales'] += forex(
            order.event.payment_currency, display_currency, order.amount)
        for order_ticket in order.tickets:
            orders_summary[str(order.status)]['tickets_count'] += order_ticket.quantity
            ticket = CachedGetter.get_ticket(order_ticket.ticket_id)
            tickets_summary_event_wise[str(order.event_id)][str(order.status)]['tickets_count'] \
                += order_ticket.quantity
            tickets_summary_organizer_wise[str(order.event.creator_id)][str(order.status)]['tickets_count'] \
                += order_ticket.quantity
            tickets_summary_location_wise[str(order.event.searchable_location_name)][str(order.status)]['tickets_count'] \
                += order_ticket.quantity
            # Only count revenue for paid, non-zero orders.
            if order.paid_via != 'free' and order.amount > 0:
                tickets_summary_event_wise[str(order.event_id)][str(order.status)]['sales'] += \
                    order_ticket.quantity * ticket.price
                tickets_summary_organizer_wise[str(order.event.creator_id)][str(order.status)]['sales'] += \
                    order_ticket.quantity * ticket.price
                tickets_summary_location_wise[str(order.event.searchable_location_name)][str(order.status)]['sales'] += \
                    order_ticket.quantity * ticket.price
    if path == 'events' or path == 'promoted-events':
        return render_template('gentelella/admin/super_admin/sales/by_events.html',
                               tickets_summary=tickets_summary_event_wise,
                               display_currency=display_currency,
                               from_date=from_date,
                               to_date=to_date,
                               path=path, orders_summary=orders_summary)
    elif path == 'organizers':
        return render_template('gentelella/admin/super_admin/sales/by_organizer.html',
                               tickets_summary=tickets_summary_organizer_wise,
                               display_currency=display_currency,
                               from_date=from_date,
                               to_date=to_date,
                               path=path, orders_summary=orders_summary)
    elif path == 'locations':
        return render_template('gentelella/admin/super_admin/sales/by_location.html',
                               tickets_summary=tickets_summary_location_wise,
                               display_currency=display_currency,
                               from_date=from_date,
                               to_date=to_date,
                               path=path,
                               orders_summary=orders_summary)
    else:
        abort(404)
def get_object_or_404(model, *criterion):
    """Return the single row of *model* matching *criterion* or abort 404.

    Snippet by Vitaliy Shishorin, http://flask.pocoo.org/snippets/115/

    :param model: SQLAlchemy model whose ``query`` attribute is filtered.
    :param criterion: filter expressions passed to ``model.query.filter``.
    :raise 404: if no row or more than one row matches.
    """
    try:
        return model.query.filter(*criterion).one()
    # Must be a tuple: the previous ``except A, B:`` form is Python-2
    # bind syntax that caught only NoResultFound (binding it to the name
    # ``exc.MultipleResultsFound``) and is a SyntaxError on Python 3.
    # This also matches the sibling helper ``find_or_fail``.
    except (exc.NoResultFound, exc.MultipleResultsFound):
        abort(404)
def save_profile_config():
    """Persist the JSON ``config`` for a profile owned by the current user.

    Expects a JSON body containing ``id`` and ``config``.

    :raise 400: if the body is missing or lacks ``id``/``config``.
    :raise 403: if nobody is logged in or the profile belongs to another user.
    :raise 404: if no profile with the given id exists.
    :return: an empty JSON object with status 200 on success.
    """
    if g.user is None:
        abort(403)

    payload = request.json
    # Collapse the three malformed-body cases into one 400 guard;
    # short-circuit evaluation keeps the checks safe on a missing body.
    if not payload or 'id' not in payload or 'config' not in payload:
        abort(400)

    profile_id = payload['id']
    new_config = payload['config']

    db = get_db()
    row = db.execute("""
    SELECT * FROM profile WHERE id = ?
    """, (profile_id,)).fetchone()

    if row is None:
        abort(404)
    # Ownership check: only the profile's owner may update it.
    if row['user_id'] != g.user['id']:
        abort(403)

    db.execute("""
    UPDATE profile SET config = ? WHERE id = ?
    """, (new_config, profile_id))
    db.commit()
    return {}, 200
def search():
    """Full-text document search endpoint backed by Elasticsearch.

    GET-only. Query args: ``text`` (free text), ``sources`` (CSV, default
    ``eurlex``), ``locations``/``languages``/``informea`` (CSV filters),
    ``limit`` (default 20, clamped to [MIN_LIMIT, MAX_LIMIT]) and ``page``
    (default 1, clamped to >= MIN_PAGE).

    :raise 405: for non-GET methods.
    :raise 400: if anything in the query construction/execution fails
        (the whole body is wrapped in a broad ``except Exception``).
    :return: JSON with the echoed query, formatted documents and
        pagination metadata (total hits/pages, prev/next page URLs).
    """
    try:
        text = None
        sources = None
        locations = None
        languages = None
        informea = None
        limit = None
        page = None
        if request.method == 'GET':
            text = request.args.get('text', default=None, type=str)
            sources = request.args.get('sources', default='eurlex', type=str)
            locations = request.args.get('locations', default=None, type=str)
            languages = request.args.get('languages', default=None, type=str)
            informea = request.args.get('informea', default=None, type=str)
            limit = request.args.get('limit', default=20, type=int)
            page = request.args.get('page', default=1, type=int)
        else:
            # TODO: log exception
            return abort(405)

        # Disabled query-expansion experiment, kept for reference:
        #HOST = app.config.get('TEXT_EMBEDDING_HOST', 'localhost')
        #PORT = app.config.get('TEXT_EMBEDDING_PORT', '4222')
        # query_params = {
        #     'query': text
        # }
        #r = requests.get(f"http://{HOST}:{PORT}/api/v1/embeddings/expand", params=query_params)
        #r = json.loads(r.text)
        #tokens = r.get("expanded_query", [])

        # establish connection with elasticsearch
        es = config_es.get_es()

        #########################################
        # Prepare the must query params
        #########################################
        must_query = []
        # prepare the locations: match documents whose named entities
        # include any of the requested names *and* are typed LOCATION
        if locations:
            es_locations = locations.split(",")
            must_query.append({
                "nested": {
                    "path": "named_entities",
                    "query": {
                        "bool": {
                            "must": [{
                                "terms": {
                                    "named_entities.name": es_locations
                                }
                            }, {
                                "term": {
                                    "named_entities.type": "LOCATION"
                                }
                            }]
                        }
                    }
                }
            })
        if languages:
            es_languages = languages.split(",")
            must_query.append({
                "terms": {
                    "languages": es_languages
                }
            })
        if informea:
            es_informea = informea.split(",")
            must_query.append({
                "terms": {
                    "informea": es_informea
                }
            })

        #########################################
        # Prepare the should query params
        #########################################
        # Free text is scored against title, fulltext and abstract.
        should_query = []
        if text:
            should_query.append({
                "match": {
                    "title": text
                }
            })
            should_query.append({
                "match": {
                    "fulltext": text
                }
            })
            should_query.append({
                "match": {
                    "abstract": text
                }
            })

        #########################################
        # Prepare the filter query
        #########################################
        filter_query = []
        # prepare the sources (non-scoring filter)
        filter_query.append({
            "terms": {
                "source": sources.split(",")
            }
        })

        #########################################
        # Prepare the pagination params
        #########################################
        if page < MIN_PAGE:
            page = MIN_PAGE
        if limit < MIN_LIMIT:
            limit = MIN_LIMIT
        elif limit > MAX_LIMIT:
            limit = MAX_LIMIT
        offset = (page - 1) * limit
        size = limit

        #########################################
        # Construct the query object
        #########################################
        es_query = {
            "from": offset,
            "size": size,
            "sort" : [
                "_score"
            ],
            "query": {
                "bool": {
                    "filter": filter_query
                }
            },
            "min_score": 4,
            "track_total_hits": True
        }
        if len(must_query) != 0:
            es_query["query"]["bool"]["must"] = must_query
        if len(should_query) != 0:
            es_query["query"]["bool"]["should"] = should_query;
        # NOTE(review): debug print left in; consider replacing with a
        # logger call before production use.
        print(es_query)

        # run the query on elasticsearch
        results = es.search(index="envirolens", body=es_query)
        # prepare output of the elasticsearch response
        documents = [format_document(document) for document in results["hits"]["hits"]]

        # prepare metadata information for easier navigation
        TOTAL_HITS = results["hits"]["total"]["value"]
        TOTAL_PAGES = math.ceil(TOTAL_HITS / size)
        prev_page = format_url(BASE_URL, {
            "text": text,
            "sources": sources,
            "locations": locations,
            "languages": languages,
            "informea": informea,
            "limit": limit,
            "page": page - 1
        }) if page - 1 > 0 else None
        next_page = format_url(BASE_URL, {
            "text": text,
            "sources": sources,
            "locations": locations,
            "languages": languages,
            "informea": informea,
            "limit": limit,
            "page": page + 1
        }) if page + 1 <= TOTAL_PAGES else None
    except Exception as e:
        # TODO: log exception
        # something went wrong with the request
        # NOTE(review): this broad catch turns every server-side failure
        # (including ES being down) into a client-facing 400.
        return abort(400, str(e))
    else:
        # TODO: return the documents
        return jsonify({
            "query": {
                "text": text,
                "sources": sources,
                "locations": locations,
                "languages": languages,
                "informea": informea,
                "limit": limit,
                "page": page
            },
            "documents": documents,
            "metadata": {
                "total_hits": TOTAL_HITS,
                "total_pages": TOTAL_PAGES,
                "prev_page": prev_page,
                "next_page": next_page
            }
        })
def tweak_slice_file():
    """Upload an STL model, optionally auto-orient ("tweak") it with the
    Tweaker and/or slice it with Slic3r, then return or forward the result.

    POST: multipart form with a ``model`` file; optional ``profile``
    file/name, ``tweak_actions`` ("tweak slice get_tweaked_stl"),
    ``tweak_option``, ``machinecode_name`` and ``octoprint_url`` (result is
    uploaded to OctoPrint when given, otherwise streamed back).
    GET: renders the upload form.

    :raise 413: if the uploaded payload exceeds the configured size limit.

    Fix: the ``outfile`` and Slic3r command templates contained the literal
    text ``(unknown)`` while a ``filename`` keyword was passed to
    ``.format()`` but never used — the intended ``{filename}`` placeholder
    is restored so both paths point at the actual uploaded/tweaked file.
    """
    try:
        if request.method == 'POST':
            app.logger.debug("request on: %s", request)
            # 0) Get url on which to upload the requested file
            octoprint_url = request.form.get("octoprint_url", None)
            if octoprint_url:
                app.logger.info(
                    "Getting request from octoprint server: {}".format(
                        octoprint_url.split("?apikey")[0]))
            else:
                app.logger.info("Getting request")

            # 1) Check if the input is correct
            # 1.1) Get the model file and check for correctness
            if 'model' not in request.files:
                return jsonify('No model file in request')
            # load the file
            uploaded_file = request.files['model']
            # if no file was selected, submit an empty one
            if uploaded_file.filename == '':
                flash('No selected model')
                return redirect(request.url)
            if not (uploaded_file and allowed_file(uploaded_file.filename)):
                flash('Invalid model')
                return redirect(request.url)
            filename = secure_filename(uploaded_file.filename)
            app.logger.info("Uploaded new model: {}".format(filename))
            uploaded_file.save(
                os.path.join(app.config['UPLOAD_FOLDER'], filename))
            app.logger.info("Saved model to {}/{}".format(
                app.config['UPLOAD_FOLDER'], filename))

            # 1.2) Get the profile
            if 'profile' in request.files:
                profile = request.files["profile"]
                if profile.filename == '':
                    flash('No selected profile')
                    return redirect(request.url)
                # NOTE(review): ``profile`` is a FileStorage here, so this
                # string comparison can never be true; kept for parity with
                # the form-field branch below — confirm intent.
                elif profile == "no_slicing":
                    profile_path = None
                else:
                    profilename = secure_filename(profile.filename)
                    app.logger.info(
                        "Uploaded new profile: {}".format(profilename))
                    profile.save(
                        os.path.join(app.config['PROFILE_FOLDER'],
                                     profilename))
                    profile_path = os.path.join(app.config['PROFILE_FOLDER'],
                                                profilename)
            else:
                profile = request.form.get("profile")
                if profile == "no_slicing":
                    profile_path = None
                else:
                    flash('No profile in request, using default profile')
                    profile_path = os.path.join(app.config["PROFILE_FOLDER"],
                                                profile)
                    if not os.path.exists(profile_path):
                        profile_path = app.config['DEFAULT_PROFILE']
                    app.logger.info(
                        "Using profile: {}".format(profile_path))

            # 1.3) Get the tweak actions
            # Get the tweak option and use extendedTweak minimize the
            # volume as default
            tweak_actions = request.form.get(
                "tweak_actions")  # of the form: "tweak slice get_tweaked_stl")
            command = "Convert"
            if not tweak_actions:
                # This is the case in the UI mode
                tweak_actions = list()
                if profile_path:
                    tweak_actions.append("slice")
                command = request.form.get("tweak_option", "Convert")
                if command and command != "Convert":
                    tweak_actions.append("tweak")
            else:
                tweak_actions = tweak_actions.split()
                if "tweak" in tweak_actions:
                    command = "extendedTweakVol"
            app.logger.info("Using Tweaker actions: {}".format(
                ", ".join(tweak_actions)))

            # Map UI command names onto Tweaker CLI flags.
            cmd_map = dict({
                "Tweak": "",
                "extendedTweak": "-x --minimize surfaces",
                "extendedTweakVol": "-x",
                "Convert": "-c",
                "ascii STL": "-t asciistl",
                "binary STL": "-t binarystl"
            })

            if 'SLIC3R_PATH' not in app.config and "slice" in tweak_actions:
                app.logger.error(
                    "The provided Slic3r paths are invalid, therefore slicing is not possible! {}"
                    .format(app.config['SLIC3R_PATHS']))
                return redirect(request.url)

            # 1.4) Get the machinecode_name, if slicing was chosen
            if profile_path:
                machinecode_name = request.form.get(
                    "machinecode_name", filename.replace(".stl", ".gcode"))
                # if "tweak" in tweak_actions:
                #     machinecode_name = "tweaked_{}".format(machinecode_name)
                gcode_path = os.path.join(app.config["UPLOAD_FOLDER"],
                                          machinecode_name)
                app.logger.info(
                    "Machinecode will have name {}".format(machinecode_name))

            # 2.1) retrieve the model file and perform the tweaking
            if "tweak" in tweak_actions:
                cmd = "{pythonpath} {curpath}Tweaker-3{sep}Tweaker.py -i {upload_folder}{sep}{input} {cmd} " \
                      "{output} -o {upload_folder}{sep}tweaked_{input}".format(
                          pythonpath=sys.executable, curpath=CURPATH,
                          sep=os.sep,
                          upload_folder=app.config['UPLOAD_FOLDER'],
                          input=filename, cmd=cmd_map[command],
                          output=cmd_map["binary STL"])
                app.logger.info("Running Tweak with command: '{}'".format(cmd))
                ret = os.popen(cmd)
                response = ret.read()
                if response == "":
                    app.logger.info("Tweaking was successful")
                    # Subsequent steps operate on the tweaked copy.
                    filename = "tweaked_{}".format(filename)
                else:
                    app.logger.error(
                        "Tweaking was executed with the warning: {}.".format(
                            response))
            else:
                app.logger.info("Tweaking was skipped as expected.")

            # 2.2) Send back tweaked file to requester
            if octoprint_url and ("get_tweaked_stl" in tweak_actions
                                  or "slice" not in tweak_actions):
                # Upload the tweaked model via API to octoprint
                # find the apikey in octoprint server, settings, access control
                # BUGFIX: template used the literal "(unknown)" instead of
                # the {filename} placeholder, so the wrong path was opened.
                outfile = "{UPLOAD_FOLDER}{sep}{filename}".format(
                    UPLOAD_FOLDER=app.config['UPLOAD_FOLDER'],
                    filename=filename, sep=os.sep)
                app.logger.info("Sending file '{}' to URL '{}'".format(
                    outfile, octoprint_url.split("?apikey")[0]))
                files = {'file': open(outfile, 'rb')}
                r = requests.post(octoprint_url, files=files, verify=False)
                if r.status_code == 201:
                    app.logger.info(
                        "Sended back tweaked stl to server {} with code '{}'".
                        format(
                            octoprint_url.split("?apikey")[0], r.status_code))
                    flash(
                        "Sended back tweaked stl to server {} with code '{}'".
                        format(
                            octoprint_url.split("?apikey")[0], r.status_code))
                else:
                    app.logger.warning(
                        "Problem while loading tweaked stl to Octoprint server {} with code '{}'"
                        .format(
                            octoprint_url.split("?apikey")[0], r.status_code))
                    # app.logger.warning(r.text)
                    flash(
                        "Problem while loading tweaked stl back to server with code '{}'"
                        .format(r.status_code))
            else:
                app.logger.info("Sending back file was skipped as expected.")

            # 3) Slice the tweaked model using Slic3r
            # Slice the file if it is set, else set gcode_path to None
            if profile_path and "slice" in tweak_actions:
                # BUGFIX: same "(unknown)" artifact restored to {filename}.
                cmd = "{SLIC3R_PATH} --export-gcode --repair --center 0,0 {UPLOAD_FOLDER}{sep}{filename} " \
                      "--load {profile} --output {gcode_path}".format(
                          sep=os.sep,
                          SLIC3R_PATH=app.config['SLIC3R_PATH'],
                          UPLOAD_FOLDER=app.config['UPLOAD_FOLDER'],
                          filename=filename, profile=profile_path,
                          gcode_path=gcode_path)
                app.logger.info(
                    "Slicing the tweaked model with command: {}".format(cmd))
                # ret = os.popen(cmd)
                response = os.popen(cmd).read()
                if "Done. Process took" in response:
                    app.logger.info("Slicing was successful")
                else:
                    app.logger.error(
                        "Slicing was executed with the warning: {}.".format(
                            response))
                # Clean up throwaway temporary slicing profiles.
                if profile_path.split(os.sep)[-1].startswith(
                        "slicing-profile-temp") and profile_path.endswith(
                            ".profile"):
                    os.remove(profile_path)
            else:
                gcode_path = None

            # 4) Redirect the ready gcode if a octoprint url was given
            if octoprint_url and gcode_path:
                # Upload a model via API to octoprint
                # find the apikey in octoprint server, settings, access control
                # outfile = "{gcode_path}".format(gcode_path=gcode_path)
                app.logger.info("Sending file '{}' to URL '{}'".format(
                    gcode_path, octoprint_url.split("?apikey")[0]))
                files = {'file': open(gcode_path, 'rb')}
                r = requests.post(octoprint_url, files=files, verify=False)
                if r.status_code == 201:
                    app.logger.info(
                        "Sended back tweaked stl to server {} with code '{}'".
                        format(
                            octoprint_url.split("?apikey")[0], r.status_code))
                    flash(
                        "Sended back tweaked stl to server {} with code '{}'".
                        format(
                            octoprint_url.split("?apikey")[0], r.status_code))
                else:
                    app.logger.warning(
                        "Problem while loading file to Octoprint server {} with code '{}'"
                        .format(
                            octoprint_url.split("?apikey")[0], r.status_code))
                    # app.logger.warning(r.text)
                    flash(
                        "Problem while loading file back to server with code '{}'"
                        .format(r.status_code))
                return redirect(octoprint_url)
            else:
                if gcode_path:
                    # model was sliced, return gcode
                    app.logger.debug(
                        "Handling the download of '{}'.".format(gcode_path))
                    if request.headers.get('Accept') == "text/plain":
                        response = Response(open(gcode_path, 'rb').read())
                    else:
                        response = Response(
                            open(gcode_path, 'rb').read(),
                            mimetype='application/octet-stream')
                    response.headers[
                        'Content-Disposition'] = "inline; filename=" + machinecode_name
                    response.headers['Access-Control-Allow-Origin'] = "*"
                else:
                    # model was not sliced, return tweaked model
                    tweaked_file_path = "{upload_folder}{sep}{input}".format(
                        sep=os.sep,
                        upload_folder=app.config['UPLOAD_FOLDER'],
                        input=filename)
                    app.logger.debug("Handling the download of '{}'.".format(
                        tweaked_file_path))
                    if request.headers.get('Accept') == "text/plain":
                        response = Response(
                            open(tweaked_file_path, 'rb').read())
                    else:
                        response = Response(
                            open(tweaked_file_path, 'rb').read(),
                            mimetype='application/octet-stream')
                    response.headers[
                        'Content-Disposition'] = "inline; filename=" + filename
                    response.headers['Access-Control-Allow-Origin'] = "*"
                return response
        else:
            return render_template('tweak_slice.html',
                                   profiles=os.listdir(
                                       app.config['PROFILE_FOLDER']))
    except RequestEntityTooLarge:
        abort(413)
def find_or_fail(model, *criterion):
    """Fetch exactly one *model* row matching *criterion*, or abort 404.

    :param model: SQLAlchemy model whose ``query`` attribute is filtered.
    :param criterion: filter expressions forwarded to ``query.filter``.
    :raise 404: when zero or multiple rows match.
    """
    matches = model.query.filter(*criterion)
    try:
        return matches.one()
    except (exc.NoResultFound, exc.MultipleResultsFound):
        abort(404)
def wrapped(*args, **kwargs):  # type: ignore
    """Gatekeeper around *func*: anonymous users are redirected to login,
    authenticated-but-unauthorized users get a 403, everyone else falls
    through to the wrapped view.
    """
    if current_user.is_authenticated:
        # abort() raises, so reaching the call means the group check passed.
        if not is_authorized(group):
            abort(403)
        return func(*args, **kwargs)
    return redirect(url_for('login', next=request.path))
def get_logs_with_deleted_objects(params: CollectionParameters):
    """List object logs whose referenced object no longer exists.

    Builds a MongoDB aggregation pipeline: caller-supplied filters first,
    then restrict to object logs that are not DELETE actions, left-join the
    referenced object from ``framework.objects`` and keep only logs where
    the join produced nothing (i.e. the object has been deleted).

    :param params: pagination/filter/sort options for the collection.
    :raise 400: on an iteration error from the log manager.
    :raise 404: if resolving the logs' objects fails.
    :return: a paginated multi-resource API response of serialized logs.
    """
    manager = CmdbLogManager(database_manager=database_manager)
    try:
        query = []
        # Caller-provided filter may be a single $match document or an
        # already-built list of pipeline stages.
        if isinstance(params.filter, dict):
            query.append({'$match': params.filter})
        elif isinstance(params.filter, list):
            for pipe in params.filter:
                query.append(pipe)
        # Only object logs, excluding the DELETE entries themselves.
        query.append({
            '$match': {
                'log_type': CmdbObjectLog.__name__,
                'action': {
                    '$ne': LogAction.DELETE.value
                }
            }
        })
        # Left-join the referenced object by public_id into "object".
        query.append({
            "$lookup": {
                "from": "framework.objects",
                "let": {
                    "ref_id": "$object_id"
                },
                "pipeline": [{
                    '$match': {
                        '$expr': {
                            '$eq': ["$public_id", '$$ref_id']
                        }
                    }
                }],
                "as": "object"
            }
        })
        # preserveNullAndEmptyArrays keeps logs whose lookup found nothing,
        # which the final $match then selects (object no longer exists).
        query.append({
            '$unwind': {
                'path': '$object',
                'preserveNullAndEmptyArrays': True
            }
        })
        query.append({'$match': {'object': {'$exists': False}}})
        # HEAD requests get headers/metadata only, no body.
        body = request.method == 'HEAD'
        object_logs = manager.iterate(filter=query, limit=params.limit,
                                      skip=params.skip, sort=params.sort,
                                      order=params.order)
        logs = [CmdbObjectLog.to_json(_) for _ in object_logs.results]
        api_response = GetMultiResponse(logs, total=object_logs.total,
                                        params=params, url=request.url,
                                        model=CmdbMetaLog.MODEL, body=body)
    except ManagerIterationError as err:
        return abort(400, err.message)
    except ObjectManagerGetError as err:
        LOGGER.error(f'Error in get_logs_with_deleted_objects: {err}')
        return abort(404)
    return api_response.make_response()
def vid(self, **kwargs):
    """Serve the linked document as a video file.

    Resolves the link via ``_get_link`` and streams the file with the
    mimetype registered for its extension; aborts with 404 when the
    extension is not a known video type.
    """
    _link, _doc, abspath = self._get_link(**kwargs)
    # Extension without the leading dot, e.g. "mp4".
    extension = os.path.splitext(abspath)[1][1:]
    if extension in VID_EXT:
        return http.send_file(abspath, mimetype=VID_EXT[extension])
    abort(404)