def basic_init(root_loglevel=None, config_filepath=None, prefer_config_filename=None, log_filepath=None,
               log_filename=None, use_logstash=None, force_test_db=None, automigrate=False):
    """Run the one-time "basic" initialization phase: config, logging, locale, app and DB setup.

    Idempotent: returns immediately if the "basic" init state was already reached.
    Ordering matters here — config must be loaded before logging, and the DB
    connector before type loading / connecting.
    """
    init_state = "basic"
    if init_state_reached(init_state):
        return
    _init_locks()
    add_ustr_builtin()
    # config must be initialized before anything that reads settings (incl. logging)
    import core.config
    core.config.initialize(config_filepath, prefer_config_filename)
    import utils.log
    utils.log.initialize(root_loglevel, log_filepath, log_filename, use_logstash)
    log_basic_sys_info()
    if config.getboolean("config.enable_startup_checks", True):
        check_imports()
    set_locale()
    init_app()
    init_db_connector()
    load_system_types()
    load_types()
    connect_db(force_test_db, automigrate)
    _set_current_init_state(init_state)
    from core import medmarc as _  # mustn't be imported too early
    # discard any state left over from the init queries
    from core import db
    db.session.rollback()
def _handle_login_submit(req):
    """Process a submitted login form.

    Returns 1 when login fails (empty user name or bad credentials); returns
    None on success after storing the user in the session and setting the
    redirect `Location` header on the request.
    """
    # default to "" — req.form.get("user") could return None for a request
    # without a "user" field, and None.strip() would raise AttributeError
    # before the short-circuited `"user" in req.form` check was reached
    login_name = req.form.get("user", "")
    password = req.form.get("password", "")
    if not login_name.strip() and "user" in req.form:
        # empty username
        return 1
    user = auth.authenticate_user_credentials(login_name, password, req)
    if user:
        # stop caching
        req.setCookie("nocache", "1", path="/")
        if "contentarea" in req.session:
            del req.session["contentarea"]
        req.session["user_id"] = user.id
        logg.info("%s logged in", user.login_name)
        # redirect target priority: explicit return URL > forced https > collection root
        if req.session.get('return_after_login'):
            req['Location'] = req.session['return_after_login']
        elif config.get("config.ssh", "") == "yes":
            req["Location"] = ''.join([
                "https://",
                config.get("host.name"),
                _make_collection_root_link()
            ])
        else:
            req["Location"] = _make_collection_root_link()
        # stores the date/time when a user logs in except in read-only mode
        if not config.getboolean("config.readonly", False):
            user.last_login = datetime.now()
            db.session.commit()
    else:
        return 1
def _handle_login_submit(req):
    """Process a submitted login form.

    Returns 1 when login fails (empty user name or bad credentials); returns
    None on success after storing the user in the session and setting the
    redirect `Location` header on the request.
    """
    # default to "" — req.form.get("user") could return None for a request
    # without a "user" field, and None.strip() would raise AttributeError
    # before the short-circuited `"user" in req.form` check was reached
    login_name = req.form.get("user", "")
    password = req.form.get("password", "")
    if not login_name.strip() and "user" in req.form:
        # empty username
        return 1
    user = auth.authenticate_user_credentials(login_name, password, req)
    if user:
        # stop caching
        req.setCookie("nocache", "1", path="/")
        if "contentarea" in req.session:
            del req.session["contentarea"]
        req.session["user_id"] = user.id
        logg.info("%s logged in", user.login_name)
        # redirect target priority: explicit return URL > forced https > collection root
        if req.session.get('return_after_login'):
            req['Location'] = req.session['return_after_login']
        elif config.get("config.ssh", "") == "yes":
            req["Location"] = ''.join(["https://", config.get("host.name"), _make_collection_root_link()])
        else:
            req["Location"] = _make_collection_root_link()
        # stores the date/time when a user logs in except in read-only mode
        if not config.getboolean("config.readonly", False):
            user.last_login = datetime.now()
            db.session.commit()
    else:
        return 1
def _additional_init():
    """Run the second init phase: DB validity checks, workflows, plugins, search and TAL.

    Expects `basic_init` to have run already (config, logging and DB connector in place).
    """
    from core import db
    from core.database import validity
    enable_startup_checks = config.getboolean("config.enable_startup_checks", True)
    if enable_startup_checks:
        db.check_db_structure_validity()
        validity.check_database()
    if config.getboolean("workflows.activate", True):
        register_workflow()
    from core import plugins
    # modules must be initialized before plugins so plugins can build on them
    init_modules()
    plugins.init_plugins()
    if enable_startup_checks:
        # checks depend on node classes registered by modules/plugins above
        check_undefined_nodeclasses()
        update_nodetypes_in_db()
    if config.getboolean("search.activate", True):
        init_fulltext_search()
    tal_setup()
    # discard any state left over from the init queries
    db.session.rollback()
def print_url(self):
    """Return the print URL for the shown node or container, or None.

    Returns None when printing is disabled, or when container printing is
    vetoed via the node's `system.print` attribute.
    """
    if config.getboolean("config.enable_printing"):
        # self.content means: we're showing a single result node.
        # Therefore, we want to print the node, not the list.
        if self.content is not None:
            return self.content.print_url
        if self.container.system_attrs.get("print", "1") == "1":
            # printing is allowed for containers by default, unless system.print != "1" is set on the node
            # keep only the sort parameters so the printout matches the shown ordering
            params = {k: v for k, v in iteritems(self.nav_params) if k.startswith("sortfield")}
            # calls the module-level print_url() helper (same name as this property)
            return print_url(self.container.id, **params)
def _prepareData(self, req):
    """Assemble the template context dict for rendering this image node.

    Adds image/zoom URLs, archive fetch state, attachment listing and
    version-specific data on top of the generic node data.
    """
    obj = prepare_node_data(self, req)
    if obj["deleted"]:
        # no more processing needed if this object version has been deleted
        # rendering has been delegated to current version
        return obj
    obj["highres_url"] = None
    can_see_original = self.has_data_access()
    use_flash_zoom = config.getboolean("image.use_flash_zoom", True) and self.should_use_zoom
    image_url = '/fullsize?id=%d' % self.id if use_flash_zoom else '/image/%d' % self.id
    image_url = self._add_version_tag_to_url(image_url)
    archive = get_archive_for_node(self)
    if archive:
        if can_see_original:
            obj['highres_url'] = u"/file/{nid}/{nid}.tif".format(
                nid=self.id)
        # map archive state to a fetch URL: absent -> trigger fetch, pending -> marker, present -> nothing
        archive_state = archive.get_file_state(self)
        if archive_state == Archive.NOT_PRESENT:
            obj['archive_fetch_url'] = u"/archive/{}".format(self.id)
        elif archive_state == Archive.PENDING:
            obj['archive_fetch_url'] = u"pending"
        elif archive_state == Archive.PRESENT:
            obj['archive_fetch_url'] = None
    files, sum_size = filebrowser(self, req)
    obj['canseeoriginal'] = can_see_original
    obj['preferred_image_url'] = self.preferred_image_url
    obj["image_formats"] = self.get_image_formats()
    obj['zoom'] = self.zoom_available
    obj['image_url'] = image_url
    obj['attachment'] = files
    obj['sum_size'] = sum_size
    obj['presentation_url'] = self.presentation_url
    obj['fullsize'] = str(self.id)
    if not self.isActiveVersion():
        # non-current version: expose the tag and request that version explicitly
        obj['tag'] = self.tag
        obj['fullsize'] += "&v=" + self.tag
    # NOTE(review): quoting appears to apply to all versions, not just inactive
    # ones — confirm against the template that consumes 'fullsize'
    obj['fullsize'] = '"' + obj['fullsize'] + '"'
    full_style = req.args.get(u"style", u"full_standard")
    if full_style:
        obj['style'] = full_style
    return obj
def _prepareData(self, req):
    """Assemble the template context dict for rendering this image node.

    Adds image/zoom URLs, archive fetch state, attachment listing and
    version-specific data on top of the generic node data.
    """
    obj = prepare_node_data(self, req)
    if obj["deleted"]:
        # no more processing needed if this object version has been deleted
        # rendering has been delegated to current version
        return obj
    obj["highres_url"] = None
    can_see_original = self.has_data_access()
    use_flash_zoom = config.getboolean("image.use_flash_zoom", True) and self.should_use_zoom
    image_url = '/fullsize?id=%d' % self.id if use_flash_zoom else '/image/%d' % self.id
    image_url = self._add_version_tag_to_url(image_url)
    archive = get_archive_for_node(self)
    if archive:
        if can_see_original:
            obj['highres_url'] = u"/file/{nid}/{nid}.tif".format(nid=self.id)
        # map archive state to a fetch URL: absent -> trigger fetch, pending -> marker, present -> nothing
        archive_state = archive.get_file_state(self)
        if archive_state == Archive.NOT_PRESENT:
            obj['archive_fetch_url'] = u"/archive/{}".format(self.id)
        elif archive_state == Archive.PENDING:
            obj['archive_fetch_url'] = u"pending"
        elif archive_state == Archive.PRESENT:
            obj['archive_fetch_url'] = None
    files, sum_size = filebrowser(self, req)
    obj['canseeoriginal'] = can_see_original
    obj['preferred_image_url'] = self.preferred_image_url
    obj["image_formats"] = self.get_image_formats()
    obj['zoom'] = self.zoom_available
    obj['image_url'] = image_url
    obj['attachment'] = files
    obj['sum_size'] = sum_size
    obj['presentation_url'] = self.presentation_url
    obj['fullsize'] = str(self.id)
    if not self.isActiveVersion():
        # non-current version: expose the tag and request that version explicitly
        obj['tag'] = self.tag
        obj['fullsize'] += "&v=" + self.tag
    # NOTE(review): quoting appears to apply to all versions, not just inactive
    # ones — confirm against the template that consumes 'fullsize'
    obj['fullsize'] = '"' + obj['fullsize'] + '"'
    full_style = req.args.get(u"style", u"full_standard")
    if full_style:
        obj['style'] = full_style
    return obj
def init_modules():
    """Init modules with their own init function.

    Each module is imported lazily and initialized exactly once.
    """
    from export import oaisets
    if oai_enabled := config.getboolean("oai.activate", False):
        oaisets.init()
    from export import exportutils
    exportutils.init()
    from schema import schema
    schema.init()
    from core import xmlnode
    # xmlnode.init()
    from core import auth
    auth.init()
    # NOTE: a second `from export import exportutils; exportutils.init()` used
    # to follow here — removed, it initialized the same module twice
def make_app():
    """Creates the mediaTUM-admin Flask app.

    When more parts of mediaTUM are converted to Flask, we might use a "global" app
    to which the admin interface is added.
    """
    admin_app = MediatumFlask("mediaTUM admin", template_folder="web/templates")
    admin_app.debug = True
    # Generate seed for signed session cookies
    make_key_char = _functools.partial(_random.SystemRandom().choice, _string.ascii_letters)
    admin_app.config["SECRET_KEY"] = "".join(make_key_char() for _ in xrange(80))
    # flask.timeout is expressed in seconds; passing the bare int as the first
    # positional timedelta argument would have been interpreted as *days*
    admin_app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(
        seconds=int(config.get('flask.timeout', "7200")))
    if DEBUG:
        admin_app.debug = True
        from werkzeug.debug import DebuggedApplication
        admin_app.wsgi_app = DebuggedApplication(admin_app.wsgi_app, True)
    admin = Admin(admin_app, name="mediaTUM", template_mode="bootstrap3",
                  index_view=IndexView(), base_template='admin_base.html')
    admin_enabled = config.getboolean("admin.activate", True)
    if admin_enabled:
        admin.add_view(UserView())
        admin.add_view(UserGroupView())
        admin.add_view(AuthenticatorInfoView())
        admin.add_view(OAuthUserCredentialsView())
        admin.add_view(NodeView())
        admin.add_view(FileView())
        admin.add_view(NodeAliasView())
        admin.add_view(SettingView())
        admin.add_view(AccessRuleView())
        admin.add_view(AccessRulesetView())
        admin.add_view(AccessRulesetToRuleView())
    return admin_app
def make_app():
    """Creates the mediaTUM-admin Flask app.

    When more parts of mediaTUM are converted to Flask, we might use a "global" app
    to which the admin interface is added.
    """
    templates_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates')
    admin_app = Flask("mediaTUM admin", template_folder=templates_dir)
    admin_app.debug = True
    # A fixed secret key ("dev") would let anyone forge signed session cookies.
    # Use a random per-process key instead; sessions are invalidated on restart,
    # which matches the behaviour of the other make_app variant.
    admin_app.config["SECRET_KEY"] = os.urandom(40)
    if DEBUG:
        admin_app.debug = True
        from werkzeug.debug import DebuggedApplication
        admin_app.wsgi_app = DebuggedApplication(admin_app.wsgi_app, True)
    admin = Admin(admin_app, name="mediaTUM", template_mode="bootstrap3",
                  index_view=IndexView(), base_template='admin_base.html')
    admin.add_view(UserView())
    admin.add_view(UserGroupView())
    admin.add_view(AuthenticatorInfoView())
    admin.add_view(OAuthUserCredentialsView())
    admin.add_view(NodeView())
    admin.add_view(FileView())
    admin.add_view(NodeAliasView())
    admin.add_view(SettingView())
    admin.add_view(AccessRuleView())
    admin.add_view(AccessRulesetView())
    admin.add_view(AccessRulesetToRuleView())
    if config.getboolean("admin.enable_rediscli", False):
        from flask_admin.contrib import rediscli
        from redis import Redis
        admin.add_view(ProtectedRedisCli(Redis(db=1), name="Redis CLI"))
    return admin_app
def make_app():
    """Creates the mediaTUM-admin Flask app.

    When more parts of mediaTUM are converted to Flask, we might use a "global" app
    to which the admin interface is added.
    """
    templates_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates')
    admin_app = Flask("mediaTUM admin", template_folder=templates_dir)
    admin_app.debug = True
    # A fixed secret key ("dev") would let anyone forge signed session cookies.
    # Use a random per-process key instead; sessions are invalidated on restart.
    admin_app.config["SECRET_KEY"] = os.urandom(40)
    if DEBUG:
        admin_app.debug = True
        from werkzeug.debug import DebuggedApplication
        admin_app.wsgi_app = DebuggedApplication(admin_app.wsgi_app, True)
    admin = Admin(admin_app, name="mediaTUM", template_mode="bootstrap3",
                  index_view=IndexView(), base_template='admin_base.html')
    admin.add_view(UserView())
    admin.add_view(UserGroupView())
    admin.add_view(AuthenticatorInfoView())
    admin.add_view(OAuthUserCredentialsView())
    admin.add_view(NodeView())
    admin.add_view(FileView())
    admin.add_view(NodeAliasView())
    admin.add_view(SettingView())
    admin.add_view(AccessRuleView())
    admin.add_view(AccessRulesetView())
    admin.add_view(AccessRulesetToRuleView())
    if config.getboolean("admin.enable_rediscli", False):
        from redis import Redis
        # the socket path was hard-coded to a developer's home directory; keep
        # that value as the default but allow overriding it via configuration
        redis_socket = config.get("admin.rediscli_socket",
                                  "/home/congkhacdung/redis/redis.sock")
        admin.add_view(ProtectedRedisCli(
            Redis(db=1, port=0, unix_socket_path=redis_socket),
            name="Redis CLI"))
    return admin_app
def __init__(self, path, name, host):
    """Store the site name/host and derive the web-root path and URL scheme.

    The scheme is https unless host.ssl is explicitly disabled in the config.
    """
    self.name = name
    self.host = host
    self.path = '/'.join([path, 'web', 'root', name])
    if config.getboolean("host.ssl", True):
        self.url_schema = "https"
    else:
        self.url_schema = "http"
def authenticate_user_credentials(self, login, password, request=None):
    """Authenticate `login`/`password` against the LDAP directory.

    Returns the matching User on success (creating or updating it from the
    directory data unless the instance is read-only), or None on any failure.
    """
    # empty passwords not allowed, don't even try to authenticate with that...
    if not password:
        logg.info("empty password for login name %s", login)
        return
    # qualify bare login names with the configured user URL suffix
    if "@" not in login and self.user_url:
        login += self.user_url
    searchfilter = self.searchfilter_template.replace("[username]", login)
    # first phase: anonymous/bind search for the user entry
    result_type, auth_result_data = self.try_auth(searchfilter)
    if result_type == ldap.RES_SEARCH_RESULT:
        if len(auth_result_data) > 0:
            result_type = ldap.RES_SEARCH_ENTRY
            auth_result_data = auth_result_data[0]
        else:
            logg.info("LDAP auth failed for login name %s", login)
            return
    if result_type != ldap.RES_SEARCH_ENTRY:
        logg.info("LDAP auth failed for login name %s", login)
        return
    user_dn = auth_result_data[0][0]
    auth_result_dict = auth_result_data[0][1]
    # directory-side unique user id (bytes in python-ldap)
    dir_id = auth_result_dict[self.user_login][0]
    # second phase: bind as the user with the supplied password
    result_type, login_result_data = self.try_login(
        user_dn, password, searchfilter)
    if (result_type == ldap.RES_SEARCH_RESULT and len(login_result_data) > 0):
        result_type = ldap.RES_SEARCH_ENTRY
        login_result_data = login_result_data[0]
    if (result_type != ldap.RES_SEARCH_ENTRY):
        logg.info("LDAP auth failed for login name %s", login)
        return
    if login_result_data[0][0] == user_dn:
        user = q(User).filter_by(login_name=dir_id.decode("utf8")).join(
            AuthenticatorInfo).filter_by(
            name=self.name, auth_type=LDAPAuthenticator.auth_type).scalar()
        if user is not None:
            # we already have an user object, update data
            if config.getboolean("config.readonly"):
                logg.warn(
                    "cannot update existing user data for login name %s in read-only mode",
                    login, trace=False)
            else:
                self.update_ldap_user(login_result_data[0][1], user)
                db.session.commit()
            # known users may log in even when the update was skipped (read-only)
            logg.info("LDAP auth succeeded for known login name %s", login)
            return user
        else:
            data = login_result_data[0][1]
            authenticator_info = q(AuthenticatorInfo).filter_by(
                name=self.name, auth_type=LDAPAuthenticator.auth_type).scalar()
            user = self.add_ldap_user(data, login, authenticator_info)
            if config.getboolean("config.readonly"):
                logg.warn(
                    "LDAP auth succeeded for login name %s, but cannot create user in read-only mode. Refusing login.",
                    login, trace=False)
                return
            # refuse login if no user was created (if no matching group was found)
            if user is not None:
                db.session.commit()
                logg.info(
                    "LDAP auth succeeded for login name %s, created new user", login)
                return user
            else:
                logg.info(
                    "LDAP auth succeeded for login name %s, but user does not have any groups. Refusing login.", login)
def authenticate_user_credentials(self, login, password, request=None):
    """Authenticate `login`/`password` against the LDAP directory.

    Returns the matching User on success (creating or updating it from the
    directory data unless the instance is read-only), or None on any failure.
    """
    # empty passwords not allowed, don't even try to authenticate with that...
    if not password:
        logg.info("empty password for login name %s", login)
        return
    # qualify bare login names with the configured user URL suffix
    if "@" not in login and self.user_url:
        login += self.user_url
    searchfilter = self.searchfilter_template.replace("[username]", login)
    # first phase: anonymous/bind search for the user entry
    result_type, auth_result_data = self.try_auth(searchfilter)
    if result_type == ldap.RES_SEARCH_RESULT:
        if len(auth_result_data) > 0:
            result_type = ldap.RES_SEARCH_ENTRY
            auth_result_data = auth_result_data[0]
        else:
            logg.info("LDAP auth failed for login name %s", login)
            return
    if result_type != ldap.RES_SEARCH_ENTRY:
        logg.info("LDAP auth failed for login name %s", login)
        return
    user_dn = auth_result_data[0][0]
    auth_result_dict = auth_result_data[0][1]
    # directory-side unique user id (bytes in python-ldap)
    dir_id = auth_result_dict[self.user_login][0]
    # second phase: bind as the user with the supplied password
    result_type, login_result_data = self.try_login(user_dn, password, searchfilter)
    if (result_type == ldap.RES_SEARCH_RESULT and len(login_result_data) > 0):
        result_type = ldap.RES_SEARCH_ENTRY
        login_result_data = login_result_data[0]
    if (result_type != ldap.RES_SEARCH_ENTRY):
        logg.info("LDAP auth failed for login name %s", login)
        return
    if login_result_data[0][0] == user_dn:
        user = q(User).filter_by(
            login_name=dir_id.decode("utf8")).join(AuthenticatorInfo).filter_by(
            name=self.name, auth_type=LDAPAuthenticator.auth_type).scalar()
        if user is not None:
            # we already have an user object, update data
            if config.getboolean("config.readonly"):
                logg.warn("cannot update existing user data for login name %s in read-only mode", login, trace=False)
            else:
                self.update_ldap_user(login_result_data[0][1], user)
                db.session.commit()
            # known users may log in even when the update was skipped (read-only)
            logg.info("LDAP auth succeeded for known login name %s", login)
            return user
        else:
            data = login_result_data[0][1]
            authenticator_info = q(AuthenticatorInfo).filter_by(name=self.name, auth_type=LDAPAuthenticator.auth_type).scalar()
            user = self.add_ldap_user(data, login, authenticator_info)
            if config.getboolean("config.readonly"):
                logg.warn("LDAP auth succeeded for login name %s, but cannot create user in read-only mode. Refusing login.", login, trace=False)
                return
            # refuse login if no user was created (if no matching group was found)
            if user is not None:
                db.session.commit()
                logg.info("LDAP auth succeeded for login name %s, created new user", login)
                return user
            else:
                logg.info("LDAP auth succeeded for login name %s, but user does not have any groups. Refusing login.", login)
def done(req):
    "finalize this transaction - send output to the http channel"
    unlink_tempfiles(req)
    # ----------------------------------------
    # persistent connection management
    # ----------------------------------------
    # --- BUCKLE UP! ----
    connection = _string.lower(get_header_from_match(CONNECTION, req.header))
    close_it = 0
    wrap_in_chunking = 0
    # decide keep-alive vs. close vs. chunked transfer per HTTP version
    if req.version == '1.0':
        if connection == 'keep-alive':
            # HTTP/1.0 keep-alive requires an explicit Content-Length
            if not req.has_key('Content-Length'):
                close_it = 1
            else:
                req['Connection'] = 'Keep-Alive'
        else:
            close_it = 1
    elif req.version == '1.1':
        if connection == 'close':
            close_it = 1
        elif not req.has_key('Content-Length'):
            if req.has_key('Transfer-Encoding'):
                if not req['Transfer-Encoding'] == 'chunked':
                    close_it = 1
            elif req.use_chunked:
                req['Transfer-Encoding'] = 'chunked'
                wrap_in_chunking = 1
            else:
                close_it = 1
    elif req.version is None:
        # Although we don't *really* support http/0.9 (because we'd have to
        # use \r\n as a terminator, and it would just yuck up a lot of stuff)
        # it's very common for developers to not want to type a version number
        # when using telnet to debug a server.
        close_it = 1
    if "PSESSION" not in req.Cookies:
        if req.session:
            setCookie(req, 'PSESSION', req.sessionid, path="/",
                      secure=_config.getboolean("host.ssl", True))
        # NOTE(review): the session is dropped from the handler's session store
        # here regardless of whether a cookie was just set — confirm this is
        # intended (it looks like cleanup of sessions the client never adopted)
        sessions = _athana._ATHANA_HANDLER.sessions
        if req.sessionid in sessions:
            del sessions[req.sessionid]
    if "Cache-Control" not in req.reply_headers or not _config.getboolean(
            "athana.allow_cache_header", False):
        req.reply_headers["Cache-Control"] = "no-cache"
    if req.reply_code == 500:
        # don't use Transfer-Encoding chunked because only an error message is displayed
        # this code is only necessary if a reply-header contains invalid characters but has
        # Transfer-Encoding chunked set
        req.use_chunked = 0
        if req.has_key('Transfer-Encoding'):
            if req['Transfer-Encoding'] == 'chunked':
                req['Transfer-Encoding'] = ''
    reply_header = req.build_reply_header(
        check_characters=req.reply_code != 500)
    if not reply_header:
        raise ValueError("invalid header field")
    outgoing_header = _athana_z3950.simple_producer(reply_header)
    if close_it:
        req['Connection'] = 'close'
    if wrap_in_chunking:
        outgoing_producer = _athana.chunked_producer(
            _athana.composite_producer(list(req.outgoing)))
        # prepend the header
        outgoing_producer = _athana.composite_producer(
            [outgoing_header, outgoing_producer])
    else:
        # prepend the header
        req.outgoing.insert(0, outgoing_header)
        outgoing_producer = _athana.composite_producer(list(req.outgoing))
    # actually, this is already set to None by the handler:
    req.channel.current_request = None
    # apply a few final transformations to the output
    req.channel.push_with_producer(
        # globbing gives us large packets
        _athana.globbing_producer(outgoing_producer))
    if close_it:
        req.channel.close_when_done()
def print_url(self):
    """Return the print URL for this node, or None.

    None is returned when printing is globally disabled or the node opts out
    via its system.print attribute.
    """
    printing_enabled = config.getboolean("config.enable_printing")
    if not printing_enabled:
        return None
    # printing is allowed by default unless system.print is set to something other than "1"
    if self.node.system_attrs.get("print", "1") != "1":
        return None
    # calls the module-level print_url() helper (same name as this property)
    return print_url(self.id)
from core.database.postgres import db_metadata, DeclarativeBase, MtQuery, mediatumfunc, MtVersionBase, integer_fk
from core.database.postgres import rel, bref, C, FK
from core.database.postgres.alchemyext import LenMixin, view, exec_sqlfunc
from core.database.postgres.attributes import Attributes, AttributesExpressionAdapter
from ipaddr import IPv4Address, AddressValueError
from sqlalchemy_continuum import versioning_manager
from sqlalchemy_continuum.utils import version_class
from werkzeug.utils import cached_property
from core.search.representation import SearchTreeElement
from utils.date import format_date

logg = logging.getLogger(__name__)

# if true, child counts are served from a cache instead of being computed on the fly
USE_CACHED_CHILDCOUNT = config.getboolean("database.use_cached_childcount")


class NodeType(DeclarativeBase):
    """Node type / node class description.
    We don't need that in the application, that's just to inform Postgres about our types.
    """
    __tablename__ = "nodetype"
    name = C(Unicode, primary_key=True)
    # does this type act as a container type? Other types are "content types".
    is_container = C(Boolean, index=True)
from sqlalchemy import sql from itertools import izip_longest from sqlalchemy import Unicode, Float, Integer from utils.xml import xml_remove_illegal_chars from core.nodecache import get_collections_node, get_home_root_node import core.oauth as oauth from core.search.config import get_service_search_languages from array import array from core.request_handler import get_header as _get_header from core.request_handler import sendAsBuffer as _sendAsBuffer, sendFile as _sendFile logg = logging.getLogger(__name__) q = db.query configured_host = config.get("host.name", "") allow_cross_origin = config.getboolean("services.allow_cross_origin", False) DEFAULT_CACHE_VALID = config.getint("services.default_cache_valid", 0) from web.services.cache import Cache from web.services.cache import date2string as cache_date2string resultcache = Cache(maxcount=25, verbose=True) SEND_TIMETABLE = False DEFAULT_NODEQUERY_LIMIT = config.getint("services.default_limit", 1000) def add_mask_xml(xmlroot, node, mask_name, language): # mask handling if mask_name not in ["", "none"]: # deliver every mask
from core import config
from core.node import NodeMixin, NodeVersionMixin
from core.database.postgres import db_metadata, DeclarativeBase, MtQuery, mediatumfunc, MtVersionBase, integer_fk
from core.database.postgres import rel, bref, C, FK
from core.database.postgres.alchemyext import LenMixin, view, exec_sqlfunc
from core.database.postgres.attributes import Attributes, AttributesExpressionAdapter
from ipaddr import IPv4Address, AddressValueError
from sqlalchemy_continuum import versioning_manager
from sqlalchemy_continuum.utils import version_class
from werkzeug.utils import cached_property
from core.search.representation import SearchTreeElement
from utils.date import format_date

logg = logging.getLogger(__name__)

# if true, child counts are served from a cache instead of being computed on the fly
USE_CACHED_CHILDCOUNT = config.getboolean("database.use_cached_childcount")


class NodeType(DeclarativeBase):
    """Node type / node class description.
    We don't need that in the application, that's just to inform Postgres about our types.
    """
    __tablename__ = "nodetype"
    name = C(Unicode, primary_key=True)
    # does this type act as a container type? Other types are "content types".
    is_container = C(Boolean, index=True)


class NodeAppenderQuery(AppenderMixin, LenMixin, MtQuery):
def initContexts():
    """Register all athana URL contexts, handlers and file stores for the web UI.

    Sets up TAL templating, the public frontend, admin/edit areas (when
    enabled), services, OAI, export, static files and path aliasing.
    """
    athana.setBase(config.basedir)
    athana.setTempDir(config.get("paths.tempdir", "/tmp/"))
    from core.config import resolve_filename
    from core.translation import translate, set_language
    tal.set_base(config.basedir)
    tal.add_macro_resolver(resolve_filename)
    tal.add_translator(translate)
    add_template_globals()

    @athana.request_started
    def set_lang(req, *args):
        # pick the request language before any handler runs
        set_language(req)

    # XXX: init our temporary child count cache
    from web.frontend import frame
    frame.init_child_count_cache()
    context = athana.addContext("/", ".")
    workflows_enabled = config.getboolean("workflows.activate", True)
    admin_enabled = config.getboolean("admin.activate", True)
    edit_enabled = config.getboolean("edit.activate", True)
    oai_enabled = config.getboolean("oai.activate", False)
    # === public area ===
    file = context.addFile("web/frontend/filehandlers.py")
    file.addHandler("send_thumbnail").addPattern("/thumbs/.*")
    file.addHandler("send_thumbnail2").addPattern("/thumb2/.*")
    file.addHandler("send_doc").addPattern("/doc/.*")
    file.addHandler("send_image").addPattern("/image/.*")
    file.addHandler("redirect_images").addPattern("/images/.*")
    handler = file.addHandler("send_file")
    handler.addPattern("/file/.*")
    handler.addPattern("/download/.*")
    file.addHandler("send_attachment").addPattern("/attachment/.*")
    file.addHandler("send_attfile").addPattern("/attfile/.*")
    file.addHandler("fetch_archived").addPattern("/archive/.*")
    file.addHandler("send_from_webroot").addPattern("/[a-z,0-9,-]*\.[a-z]*")
    # root directory added /web/root (only files with extensions)
    file = context.addFile("web/frontend/zoom.py")
    file.addHandler("send_imageproperties_xml").addPattern("/tile/[0-9]*/ImageProperties.xml")
    file.addHandler("send_tile").addPattern("/tile/[0-9]*/[^I].*")
    main_file = file = context.addFile("web/frontend/main.py")
    handler = file.addHandler("display")
    handler.addPattern("/")
    handler.addPattern("/node")
    handler = file.addHandler("display_newstyle")
    handler.addPattern("/nodes/\d+")
    # /\d+ could also be a node, the handler must check this
    handler.addPattern("/\d+")
    if workflows_enabled:
        file.addHandler("workflow").addPattern("/mask")
    file.addHandler("show_parent_node").addPattern("/pnode")
    file.addHandler("publish").addPattern("/publish/.*")
    file = context.addFile("web/frontend/popups.py")
    file.addHandler("popup_metatype").addPattern("/metatype/.*")
    file.addHandler("popup_fullsize").addPattern("/fullsize")
    file.addHandler("popup_thumbbig").addPattern("/thumbbig")
    # file.addHandler("show_index").addPattern("/popup_index")
    file.addHandler("show_help").addPattern("/popup_help")
    file.addHandler("show_attachmentbrowser").addPattern("/attachmentbrowser")
    if config.getboolean("config.enable_printing"):
        file.addHandler("show_printview").addPattern("/print/\d+\.pdf")
        file.addHandler("redirect_old_printview").addPattern("/print/.*")
    file = context.addFile("web/frontend/login.py")
    file.addHandler("login").addPattern("/login")
    file.addHandler("logout").addPattern("/logout")
    file.addHandler("pwdforgotten").addPattern("/pwdforgotten")
    file.addHandler("pwdchange").addPattern("/pwdchange")
    if workflows_enabled:
        file = context.addFile("workflow/diagram/__init__.py")
        file.addHandler("send_workflow_diagram").addPattern("/workflowimage")
    if admin_enabled:
        context = athana.addContext("/admin", ".")
        file = context.addFile("web/handlers/become.py")
        file.addHandler("become_user").addPattern("/_become/.*")
        file = context.addFile("web/admin/main.py")
        file.addHandler("show_node").addPattern("/(?!export/).*")
        file.addHandler("export").addPattern("/export/.*")
    if edit_enabled:
        # === edit area ===
        context = athana.addContext("/edit", ".")
        file = context.addFile("web/edit/edit.py")
        handler = file.addHandler("frameset")
        handler.addPattern("/")
        handler.addPattern("/edit")
        file.addHandler("edit_print").addPattern("/print/\d+_.+\.pdf")
        # file.addHandler("showtree").addPattern("/edit_tree")
        file.addHandler("edit_tree").addPattern("/treedata")
        file.addHandler("error").addPattern("/edit_error")
        # file.addHandler("buttons").addPattern("/edit_buttons")
        file.addHandler("content").addPattern("/edit_content")
        file.addHandler("content").addPattern("/edit_content/.*")
        file.addHandler("action").addPattern("/edit_action")
    # === ajax tree ===
    context = athana.addContext("/ftree", ".")
    # NOTE(review): this reuses the last `handler` bound above — confirm the
    # /ftree pattern is really meant to attach to that handler
    handler.addPattern("/ftree")
    file = context.addFile("web/ftree/ftree.py")
    file.addHandler("ftree").addPattern("/.*")
    # === services handling ===
    loadServices()
    # === OAI ===
    if oai_enabled:
        context = athana.addContext("/oai/", ".")
        file = context.addFile("export/oai.py")
        file.addHandler("oaiRequest").addPattern(".*")
    # === Export ===
    context = athana.addContext("/export", ".")
    file = context.addFile("web/frontend/export.py")
    file.addHandler("export").addPattern("/.*")
    # === static files ===
    athana.addFileStore("/ckeditor/", "lib/CKeditor/files.zip")
    athana.addFileStore("/css/", "web/css/")
    athana.addFileStore("/xml/", "web/xml/")
    athana.addFileStore("/img/", ["web/img/", "web/admin/img/", "web/edit/img/"])
    athana.addFileStore("/js/", ["web/js/", "js", "lib/CKeditor/js/"])
    # === last: path aliasing for collections ===
    handler = main_file.addHandler("display_alias")
    handler.addPattern("/([_a-zA-Z][_/a-zA-Z0-9]+)$")
    # 404
    handler = main_file.addHandler("display_404")
    handler.addPattern("/(.)+$")
    init_theme()
    if admin_enabled:
        import web.newadmin
        athana.add_wsgi_context("/f/", web.newadmin.app)
    # testing global exception handler
    context = athana.addContext("/_test", ".")
    file = context.addFile("web/handlers/handlertest.py")
    file.addHandler("error").addPattern("/error")
    file.addHandler("error_variable_msg").addPattern("/error_variable_msg")
    file.addHandler("db_error").addPattern("/db_error")
def _log(self, level, msg, args, exc_info=None, extra=None, trace=None):
    """Adds an optional traceback for some messages and calls Logger._log.
    A traceback is added if the logging level is at least `trace_level` or requested in the logging call.
    :param trace: Always add trace if true, never add one if false. Use trace_level if None. Defaults to None.
    """
    if trace or (trace is None and level >= self.trace_level and not exc_info):
        if extra is None:
            extra = {}
        frame = inspect.currentframe()
        tblines = traceback.format_stack(frame)

        def find_start_lineno():
            # nice hack to shorten long and ugly stack traces ;)
            # TODO: support callables
            for start_lineno, line in enumerate(tblines):
                for text in self.start_trace_at:
                    if text in line:
                        return start_lineno + 1, text
            else:
                # for-else: no marker found, keep the full trace from the top
                return 0, None

        def find_end_lineno():
            for end_lineno, line in enumerate(tblines):
                for text in self.stop_trace_at:
                    if text in line:
                        return end_lineno, text
            else:
                # cut off calls in the logging module
                if "_showwarning" in tblines[-3]:
                    return -3, None
                else:
                    return -2, None

        start_lineno, start_cutoff = find_start_lineno()
        end_lineno, end_cutoff = find_end_lineno()
        lines_cut = tblines[start_lineno:end_lineno]

        def skip_line(line):
            # skip_trace_lines entries may be substrings or predicates
            for skip in self.skip_trace_lines:
                if callable(skip):
                    if skip(line):
                        return True
                elif skip in line:
                    return True
            return False

        lines_without_skipped = [l for l in lines_cut if not skip_line(l)]
        num_skipped_lines = len(lines_cut) - len(lines_without_skipped)
        # now, let's start building our customized stack trace
        final_tracelines = []
        if start_cutoff:
            final_tracelines.append("[... omitting lines up to '{}']\n".format(start_cutoff))
        final_tracelines.extend(lines_without_skipped)
        if num_skipped_lines:
            final_tracelines.append("[filtered {} lines]".format(num_skipped_lines))
        if end_cutoff:
            final_tracelines.append("[omitting lines starting at '{}' ...]".format(end_cutoff))
        extra["trace"] = "".join(final_tracelines)
        # TAL extension lazily enabled on first use once settings are loaded
        if self.use_tal_extension is None and config.settings is not None:
            self.use_tal_extension = config.getboolean("logging.tal_extension", True)
        if self.use_tal_extension:
            tal_info, maybe_tal_traceback_line = tal_traceback_info()
            extra.update(tal_info)
            if maybe_tal_traceback_line:
                extra["trace"] += maybe_tal_traceback_line
    logging.Logger._log(self, level, msg, args, exc_info=exc_info, extra=extra)
def _log(self, level, msg, args, exc_info=None, extra=None, trace=None):
    """Adds an optional traceback for some messages and calls Logger._log.
    A traceback is added if the logging level is at least `trace_level` or requested in the logging call.
    :param trace: Always add trace if true, never add one if false. Use trace_level if None. Defaults to None.
    """
    if trace or (trace is None and level >= self.trace_level and not exc_info):
        if extra is None:
            extra = {}
        frame = inspect.currentframe()
        tblines = traceback.format_stack(frame)

        def find_start_lineno():
            # nice hack to shorten long and ugly stack traces ;)
            # TODO: support callables
            for start_lineno, line in enumerate(tblines):
                for text in self.start_trace_at:
                    if text in line:
                        return start_lineno + 1, text
            else:
                # for-else: no marker found, keep the full trace from the top
                return 0, None

        def find_end_lineno():
            for end_lineno, line in enumerate(tblines):
                for text in self.stop_trace_at:
                    if text in line:
                        return end_lineno, text
            else:
                # cut off calls in the logging module
                if "_showwarning" in tblines[-3]:
                    return -3, None
                else:
                    return -2, None

        start_lineno, start_cutoff = find_start_lineno()
        end_lineno, end_cutoff = find_end_lineno()
        lines_cut = tblines[start_lineno:end_lineno]

        def skip_line(line):
            # skip_trace_lines entries may be substrings or predicates
            for skip in self.skip_trace_lines:
                if callable(skip):
                    if skip(line):
                        return True
                elif skip in line:
                    return True
            return False

        lines_without_skipped = [l for l in lines_cut if not skip_line(l)]
        num_skipped_lines = len(lines_cut) - len(lines_without_skipped)
        # now, let's start building our customized stack trace
        final_tracelines = []
        if start_cutoff:
            final_tracelines.append(
                "[... omitting lines up to '{}']\n".format(start_cutoff))
        final_tracelines.extend(lines_without_skipped)
        if num_skipped_lines:
            final_tracelines.append(
                "[filtered {} lines]".format(num_skipped_lines))
        if end_cutoff:
            final_tracelines.append(
                "[omitting lines starting at '{}' ...]".format(end_cutoff))
        extra["trace"] = "".join(final_tracelines)
        # TAL extension lazily enabled on first use once settings are loaded
        if self.use_tal_extension is None and config.settings is not None:
            self.use_tal_extension = config.getboolean(
                "logging.tal_extension", True)
        if self.use_tal_extension:
            tal_info, maybe_tal_traceback_line = tal_traceback_info()
            extra.update(tal_info)
            if maybe_tal_traceback_line:
                extra["trace"] += maybe_tal_traceback_line
    logging.Logger._log(self, level, msg, args, exc_info=exc_info, extra=extra)