def decorator(*args, **kwargs):
    from pony.orm import db_session
    from pony.orm.core import local
    from flask import (
        after_this_request,
        current_app,
        has_app_context,
        has_request_context,
    )
    from flask.signals import appcontext_popped

    register = local.db_context_counter == 0
    if register and (has_app_context() or has_request_context()):
        db_session.__enter__()
    result = f(*args, **kwargs)
    if register:
        if has_request_context():
            @after_this_request
            def pop(request):
                db_session.__exit__()
                return request
        elif has_app_context():
            @appcontext_popped.connect_via(current_app._get_current_object())
            def pop(sender, *args, **kwargs):
                while local.db_context_counter:
                    db_session.__exit__()
        else:
            raise RuntimeError("Needs app or request context")
    return result
def __init__(self, *,
             access_token: Optional[str] = None,
             password: Optional[str] = None,
             organization: Optional[str] = None,
             report_url_template: Optional[str] = None):
    self._access_token = access_token if access_token else app.config['CREDENTIALS_MODE_ADMIN_TOKEN']
    _validate_not_none(self._access_token, 'access_token')

    self._password = password if password else app.config['CREDENTIALS_MODE_ADMIN_PASSWORD']
    _validate_not_none(self._password, 'password')

    self._organization = organization if organization else app.config['MODE_ORGANIZATION']
    _validate_not_none(self._organization, 'organization')

    self._report_url_template = report_url_template if report_url_template else DEFAULT_REPORT_URL_TEMPLATE
    if has_app_context() and app.config['MODE_REPORT_URL_TEMPLATE'] is not None:
        self._report_url_template = app.config['MODE_REPORT_URL_TEMPLATE']

    self._is_auth_enabled = False
    if has_app_context() and app.config['ACL_ENABLED_DASHBOARD_PREVIEW']:
        if not app.config['AUTH_USER_METHOD']:
            raise Exception(
                'AUTH_USER_METHOD must be configured to enable ACL_ENABLED_DASHBOARD_PREVIEW')
        self._is_auth_enabled = self.__class__.__name__ in app.config['ACL_ENABLED_DASHBOARD_PREVIEW']
        self._auth_user_method = app.config['AUTH_USER_METHOD']
def decorator(*args, **kwargs):
    from pony.orm import db_session
    from pony.orm.core import local
    from flask import after_this_request, current_app, has_app_context, \
        has_request_context
    from flask.signals import appcontext_popped

    register = local.db_context_counter == 0
    if register and (has_app_context() or has_request_context()):
        db_session.__enter__()
    result = f(*args, **kwargs)
    if register:
        if has_request_context():
            @after_this_request
            def pop(request):
                db_session.__exit__()
                return request
        elif has_app_context():
            @appcontext_popped.connect_via(
                current_app._get_current_object()
            )
            def pop(sender, *args, **kwargs):
                while local.db_context_counter:
                    db_session.__exit__()
        else:
            raise RuntimeError('Needs app or request context')
    return result
def get_redis():
    r = getattr(g, '_redis', None) if has_app_context() else None
    if r is None:
        redis_host = os.environ.get('REDISSERVER', 'redis')
        redis_port = int(os.environ.get('REDISPORT', 6379))
        r = Redis(host=redis_host, port=redis_port, db=0)
        if has_app_context():
            g._redis = r
    return r
def display_distance(self):
    if has_app_context() and g.user.is_authenticated and g.user.units:
        units = g.user.units
    else:
        units = 'local'  # default

    if units == 'local':
        country_code = (getattr(g, 'country_code', None)
                        if has_app_context() else None)
        units = country_units.get(country_code, 'km_and_metres')

    return utils.display_distance(units, self.dist)
def ldap_context(settings, use_cache=True):
    """Establishes an LDAP session context.

    Establishes a connection to the LDAP server from the `uri` in the
    ``settings`` and makes the context available in ``current_ldap``.

    Yields a namedtuple containing the connection to the server and the
    provider settings.

    :param settings: dict -- The settings for a LDAP provider.
    :param use_cache: bool -- If the connection should be cached.
    """
    try:
        connection = ldap_connect(settings, use_cache=use_cache)
        ldap_ctx = LDAPContext(connection=connection, settings=settings)
        _ldap_ctx_stack.push(ldap_ctx)
        try:
            yield ldap_ctx
        except ldap.LDAPError:
            # If something went wrong we get rid of cached connections.
            # This is mostly for the python shell where you have a very
            # long-living application context that usually results in
            # the ldap connection timing out.
            _clear_ldap_cache()
            raise
        finally:
            assert _ldap_ctx_stack.pop() is ldap_ctx, "Popped wrong LDAP context"
    except ldap.SERVER_DOWN:
        if has_app_context() and current_app.debug:
            raise
        raise MultipassException("The LDAP server is unreachable")
    except ldap.INVALID_CREDENTIALS:
        if has_app_context() and current_app.debug:
            raise
        raise ValueError("Invalid bind credentials")
    except ldap.SIZELIMIT_EXCEEDED:
        raise MultipassException(
            "Size limit exceeded (try setting a smaller page size)")
    except ldap.TIMELIMIT_EXCEEDED:
        raise MultipassException(
            "The time limit for the operation has been exceeded.")
    except ldap.TIMEOUT:
        raise MultipassException("The operation timed out.")
    except ldap.FILTER_ERROR:
        raise ValueError(
            "The filter supplied to the operation is invalid. "
            "(This is most likely due to a bad user or group filter.)")
def get_g():
    if has_app_context():
        # We're in a Flask request, the 'g' object is available
        return g
    else:
        # We're outside Flask (testing), use this module as a replacement 'g'
        return this
def run():
    if not flask.has_app_context():
        raise Exception('No request context (do not call APIs directly)')

    # We do not need to do anything if this is the OPTIONS part of a CORS request
    # TODO: may want to think of a better way of doing this where we don't need to ignore all OPTIONS requests
    #  - check if there is an x-request-id header, or potentially
    #    https://docs.newrelic.com/docs/apm/transactions/cross-application-traces/cross-application-tracing
    if flask.request.environ['REQUEST_METHOD'] == 'OPTIONS':
        return

    if requires_auth:
        # Check for the Authorization: Bearer <token> header with the JWT
        # Make sure that this JWT has been issued by someone we trust
        pass

    input_data = None
    if input_format is not None:
        try:
            input_data = flask.request.get_json()
        except Exception:
            raise BadRequest('Error parsing JSON')
        # Throws a BadRequest if the input is not valid
        validate(input_data, input_format)

    output = f(flask.request.headers, input_data)
    return self._make_json_response(output)
def get_merchant_names():
    if has_app_context():
        if not hasattr(g, 'merchant_names'):
            query = db.session.query(User)
            g.merchant_names = [(row.merchant_code, row.merchant_name) for row in query.all()]
        for merchant_code, merchant_name in g.merchant_names:
            yield merchant_code, '{0} - {1}'.format(merchant_name, merchant_code)
def audit_entry(message, level='info', extra=None):
    """Log entry, adding in session info such as active user"""
    try:
        logger = logging.getLogger(EVENT_LOG_NAME)
        log_at_level = getattr(logger, level.lower())
    except AttributeError:
        raise ValueError(f"audit_entry given bogus level: {level}")

    if extra is None:
        extra = {}
    if has_app_context():
        for x in ('user', 'subject'):
            value = get_session_value(x)
            if value:
                extra[x] = value
        if 'version' not in extra:
            extra['version'] = current_app.config['VERSION_STRING']

        # echo ERRORs to current_app.logger for alerts
        if level.lower() == 'error':
            # remove obvious PHI
            scrubbed_extra = deepcopy(extra)
            for x in ('user', 'subject', 'patient'):
                if x in scrubbed_extra:
                    scrubbed_extra[x] = 'REDACTED - see audit logs'
            current_app.logger.error(message, extra=scrubbed_extra)

    log_at_level(message, extra=extra)
def ensure_on_test_server() -> None:
    """Make sure we are on a test server."""
    assert not flask.has_app_context()
    assert not flask.has_request_context()
    # pylint: disable=protected-access
    assert current_tester._get_current_object() is not None
def create_celery_app(flask_app=None):
    logger.debug("Creating celery app")
    if not flask_app:
        if has_app_context():
            logger.debug("Using current flask app")
            app = current_app
        else:
            logger.debug("No current flask app")
            app = create_app()
    else:
        app = flask_app

    celery = Celery(app.import_name,
                    broker=app.config['CELERY_BROKER_URL'],
                    backend=app.config['CELERY_RESULT_BACKEND'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    celery.logger = get_task_logger(__name__)
    app.celery = celery
    return app
def get_device_names():
    if has_app_context():
        if not hasattr(g, 'device_names'):
            query = db.session.query(MerchantTx.device_name.distinct().label('device_name')).filter(MerchantTx.user_id == current_user.id)
            g.device_names = [row.device_name for row in query.all()]
        for device_name in g.device_names:
            yield device_name, device_name
def import_csv(self):
    if not has_app_context():
        # todo custom exception
        return None
    for row in self.csv_reader:
        if len(row) < self.min_cols:
            # todo custom exception
            continue
        manufacturer_name, device_name, repair_name, price = row
        # Get manufacturer
        manufacturer = Manufacturer.query.filter(Manufacturer.name == manufacturer_name).first()
        if manufacturer is None:
            manufacturer = Manufacturer(name=manufacturer_name)
            db.session.add(manufacturer)
        # Get device
        device = Device.query.filter(Device.name == device_name).first()
        if device is None:
            device = Device(name=device_name, manufacturer=manufacturer)
            db.session.add(device)
        # Get repair
        repair = device.repairs.filter(Repair.name == repair_name).first()
        if repair is None:
            device.repairs.append(Repair(name=repair_name, price=price))
        else:
            repair.price = price
    db.session.commit()
def get_categories():
    if has_app_context():
        if not hasattr(g, 'categories'):
            query = db.session.query(MerchantTx.category.distinct().label('category')).filter(MerchantTx.user_id == current_user.id, MerchantTx.category != None)  # pylint: disable=singleton-comparison
            g.categories = [row.category for row in query.all()]
        for category in g.categories:
            yield category, category
def after_flush(session, _):
    """Handle all ACL hooks after flush."""
    if not flask.has_app_context():
        return

    relationship.handle_relationship_creation(session)
    access_control_list.handle_acl_creation(session)

    program_role_handler = program_roles.ProgramRolesHandler()
    program_role_handler.after_flush(session)

    audit_role_handler = audit_roles.AuditRolesHandler()
    audit_role_handler.after_flush(session)

    relationship_deletion.after_flush(session)

    if hasattr(flask.g, "new_wf_acls"):
        flask.g.new_wf_acls.update(workflow.get_new_wf_acls(session))
    else:
        flask.g.new_wf_acls = workflow.get_new_wf_acls(session)

    if hasattr(flask.g, "deleted_wf_objects"):
        flask.g.deleted_wf_objects.update(
            workflow.get_deleted_wf_objects(session))
    else:
        flask.g.deleted_wf_objects = workflow.get_deleted_wf_objects(session)

    workflow.handle_acl_changes()
def _transaction_ended(session, transaction):
    # The zope transaction system closes the session (and thus the
    # transaction) e.g. when calling `transaction.abort()`.
    # In this case we need to clear the memoization cache to avoid
    # accessing memoized objects (which are now session-less).
    if transaction._parent is not None:
        # we don't care about sub-transactions
        return
    if has_app_context():
        if 'memoize_cache' in g:
            del g.memoize_cache
        if 'settings_cache' in g:
            del g.settings_cache
        if 'global_settings_cache' in g:
            del g.global_settings_cache
        if 'event_notes' in g:
            del g.event_notes
        if 'event_attachments' in g:
            del g.event_attachments
        if 'relationship_cache' in g:
            del g.relationship_cache
    if has_request_context() and hasattr(flask_session, '_user'):
        delattr(flask_session, '_user')
def get_settlement_statuses():
    if has_app_context():
        if not hasattr(g, 'settlement_statuses'):
            query = db.session.query(Settlement.status.distinct().label('status'))
            g.settlement_statuses = [row.status for row in query.all()]
        for status in g.settlement_statuses:
            yield status, status
def gather_models():
    """
    Inspect sqlalchemy models from current context and set global dictionary
    to be used in url conversion.
    """
    global MODELS

    from flask import current_app, has_app_context
    if not has_app_context():
        return
    if 'sqlalchemy' not in current_app.extensions:
        return

    # inspect current models and add to map
    db = current_app.extensions['sqlalchemy'].db
    registry = class_registry(db.Model)
    for cls in registry.values():
        if isinstance(cls, type) and issubclass(cls, db.Model):
            # class name
            MODELS[cls.__name__] = cls
            # lowercase name
            MODELS[cls.__name__.lower()] = cls
            # snake_case name
            words = re.findall(r'([A-Z][0-9a-z]+)', cls.__name__)
            if len(words) > 1:
                alias = '_'.join(map(lambda x: x.lower(), words))
                MODELS[alias] = cls
    return
def my_renderer(text):
    prerendered_body = text
    if has_app_context():
        prerendered_body = render_template_string(text)
    return markdown.markdown(
        prerendered_body,
        extensions=app.config['FLATPAGES_MARKDOWN_EXTENSIONS'])
def discover(self, app=None):
    """
    Perform module discovery. It does so by iterating over the list of
    Python packages in the order they are specified.

    :param app: Flask application object from where the list of Python
        packages is loaded (from the ``registry_namespace``). Defaults to
        ``current_app`` if not specified (thus requires you are working in
        the Flask application context).
    """
    if app is None and has_app_context():
        app = current_app

    if app is None:
        raise RegistryError("You must provide a Flask application.")

    blacklist = app.config.get(
        '%s_%s_EXCLUDE' % (self.cfg_var_prefix, self.module_name.upper()),
        [])

    for pkg in app.extensions['registry'][self.registry_namespace]:
        if not isinstance(pkg, six.string_types):
            pkg = pkg.__name__
        if pkg in blacklist:
            continue
        self._discover_module(pkg)
def initialize(cls):
    # Lists of filters for each handler
    filters = {'indico': [AddIDFilter('indico')],
               'other': [ExtraIndicoFilter()],
               'smtp': [AddIDFilter('indico')]}

    config = Config.getInstance()

    if 'files' in config.getLoggers():
        logConfFilepath = os.path.join(config.getConfigurationDir(), 'logging.conf')
        smtpServer = config.getSmtpServer()
        serverName = config.getWorkerName()
        if not serverName:
            serverName = config.getHostNameURL()

        # Default arguments for the handlers, taken mostly from the configuration
        defaultArgs = {
            'indico': ("FileHandler", "('%s', 'a')" % cls._log_path('indico.log'), 'DEBUG'),
            'other': ("FileHandler", "('%s', 'a')" % cls._log_path('other.log'), 'DEBUG'),
            'smtp': ("handlers.SMTPHandler",
                     "(%s, 'logger@%s', ['%s'], 'Unexpected Exception occurred at %s')"
                     % (smtpServer, serverName, config.getSupportEmail(), serverName),
                     "ERROR")
        }

        cls.handlers.update(LoggerUtils.configFromFile(logConfFilepath, defaultArgs, filters))

    if 'sentry' in config.getLoggers() and has_app_context():
        from raven.contrib.flask import Sentry
        current_app.config['SENTRY_DSN'] = config.getSentryDSN()
        # Plug into both Flask and `logging`
        Sentry(current_app, logging=True, level=getattr(logging, config.getSentryLoggingLevel()))
def _invoke(self, timeout) -> bool:
    """
    Invokes the command on the remote server.

    Returns
    -------
    bool:
        True if command ended up gracefully. False otherwise.

    Raises
    ------
    TimeoutError:
        raised when the timeout is reached while waiting for the response
        back from the remote server.
    """
    ctx = None
    if not has_app_context():
        ctx = self._app.app_context()
        ctx.push()
    try:
        self.__dict__['_server'] = Server.query.get(self._server)
        # set a timeout if none to avoid infinite wait in event
        if timeout is None:
            timeout = defaults.TIMEOUT_REMOTE_COMMAND
        if not self._command._cp:
            auth = HTTPBearerAuth(
                create_access_token(self._command.var_context.env['executor_id'],
                                    datetime.timedelta(seconds=15)))
            start = time.time()
            data = dict(operation=base64.b64encode(pickle.dumps(self._command.implementation)).decode('ascii'),
                        var_context=base64.b64encode(pickle.dumps(self._command.var_context)).decode('ascii'),
                        params=base64.b64encode(pickle.dumps(self._command.params)).decode('ascii'),
                        timeout=timeout,
                        step_id=str(self.id[1]),
                        orch_execution=self._command.register.json_orch_execution,
                        event_id=str(uuid.uuid4()))
            resp = post(server=self.server, view_or_url='api_1_0.launch_operation', json=data, auth=auth,
                        timeout=timeout)
            if resp.code == 204:
                current_app.events.register(data['event_id'], self.callback_completion_event)
                event = self._completion_event.wait(timeout=timeout - (time.time() - start))
                if event is not True:
                    self._command._cp = CompletedProcess(success=False, stdout='',
                                                         stderr=f'Timeout of {timeout} reached waiting '
                                                                f'server operation completion')
            elif resp.code == 200:
                self.callback_completion_event(Event(None, data=resp.msg))
            elif resp.code:
                if isinstance(resp.msg, dict):
                    msg = json.dumps(resp.msg)
                else:
                    msg = str(resp.msg)
                self._command._cp = CompletedProcess(success=False, stdout='', stderr=msg, rc=resp.code)
    finally:
        if ctx:
            ctx.pop()
    return self.success
def execRestApi(cls, endpoint, method, data=None):
    '''Making a RESTful request for access to database

    Args:
        endpoint (str) : URI
        method (str) : GET/POST/PUT
        data (dict) : arguments/data sent with API

    Returns:
        (dict) response info, including rc, msg and data
    '''
    # for unit tests on web which indirectly invoke RESTful API
    if has_app_context() and current_app.testing:
        return cls.execTestRestApi(endpoint, method, data=data)

    if method == "POST":
        r = requests.post(cls._rest_api_tmpl % (endpoint, ), data=data)
    elif method == "PUT":
        r = requests.put(cls._rest_api_tmpl % (endpoint, ), data=data)
    else:  # GET
        r = requests.get(cls._rest_api_tmpl % (endpoint, ), params=data)

    if r.status_code != 200:
        return None
    return r.json()
def read_configuration(self, env='CELERY_CONFIG_MODULE'):
    if has_app_context():
        config = current_config
    else:
        config_name = os.environ.get(env)
        if config_name is not None:
            config = load_config(config_name)
        else:
            config = None

    try:
        worker_config = config['WORKER']
    except Exception:
        warnings.warn("--config seems not provided; isn't it missing?")
        return

    celery_config = {
        'CELERY_ACCEPT_CONTENT': ['json'],
        'CELERY_TASK_SERIALIZER': 'json',
        'CELERY_IMPORTS': ast.literal_eval(worker_config['celery_imports']),
    }
    celery_config.update(worker_config)
    celery_config['APP_CONFIG'] = config
    return celery_config
def app(self):
    """Return ``app`` from constructor or ``current_app``."""
    if self._app is None and has_app_context():
        return current_app._get_current_object()
    if self._app is None:
        raise Exception('Application context or app argument needed.')
    return self._app
def discover(self, app=None):
    """
    Perform module discovery, by iterating over the list of Python packages
    in the order they are specified.

    :param app: Flask application object from where the list of Python
        packages is loaded (from the ``registry_namespace``). Defaults to
        ``current_app`` if not specified (thus requires you are working in
        the Flask application context).
    """
    if app is None and has_app_context():
        app = current_app

    if app is None:
        raise RegistryError("You must provide a Flask application.")

    blacklist = app.config.get(
        '%s_%s_EXCLUDE' % (self.cfg_var_prefix, self.module_name.upper()),
        []
    )

    for pkg in app.extensions['registry'][self.registry_namespace]:
        if not isinstance(pkg, six.string_types):
            pkg = pkg.__name__
        if pkg in blacklist:
            continue
        self._discover_module(pkg)
def discover(self, app=None, *args, **kwargs):
    """
    Discover modules.

    Specific modules can be excluded with the configuration variable
    ``<NAMESPACE>_<MODULE_NAME>_EXCLUDE`` (e.g ``PACKAGES_VIEWS_EXCLUDE``).
    The namespace name is capitalized and has dots replaced by underscores.

    :param module_name: Name of module to look for in packages
    :param registry_namespace: Name of registry containing the package
        registry. Defaults to ``packages``.
    :param with_setup: Call ``setup`` and ``teardown`` functions on module.
    """
    if app is None and has_app_context():
        app = current_app

    if app is None and hasattr(self, 'app'):
        app = getattr(self, 'app')

    if app is None:
        raise RegistryError("You must provide a Flask application.")

    blacklist = app.config.get(
        '%s_%s_EXCLUDE' % (self.cfg_var_prefix, self.module_name.upper()),
        []
    )

    for pkg in app.extensions['registry'][self.registry_namespace]:
        if not isinstance(pkg, six.string_types):
            pkg = pkg.__name__
        if pkg in blacklist:
            continue
        self._discover_module(pkg, app)
def check_free_space(config=None):
    '''Check how much disk space is free. E-mail admin if free space is low.'''
    if config is None:
        if not has_app_context():
            return
        config = current_app.config

    min_free_space = config.get('MIN_FREE_SPACE')
    if not min_free_space:  # not configured
        return

    s = os.statvfs('/')
    free_space = s.f_bsize * s.f_bavail

    if free_space > min_free_space:
        return

    readable = humanize.naturalsize(free_space)
    subject = 'Low disk space: {} OSM/Wikidata matcher'.format(readable)

    body = '''
Warning

The OSM/Wikidata matcher server is low on space.

There is currently {} available.
'''.format(readable)

    mail.send_mail(subject, body, config=config)
def import_module_from_packages(name, app=None, packages=None, silent=False):
    """Import modules from packages."""
    warnings.warn("Use of import_module_from_packages has been deprecated."
                  " Please use Flask-Registry instead.", DeprecationWarning)
    if app is None and has_app_context():
        app = current_app

    if app is None:
        raise Exception(
            'Working outside application context or provide app'
        )

    if packages is None:
        packages = app.config.get('PACKAGES', [])

    for package in packages:
        if package.endswith('.*'):
            for module in find_modules(package[:-2], include_packages=True):
                try:
                    yield import_string(module + '.' + name, silent)
                except ImportError:
                    pass
                except Exception:
                    app.logger.exception("could not import %s.%s", package, name)
            continue
        try:
            yield import_string(package + '.' + name, silent)
        except ImportError:
            pass
        except Exception:
            app.logger.exception("could not import %s.%s", package, name)
def __init__(self, max_workers=None, thread_name_prefix=''):
    """Initializes a new ThreadPoolExecutor instance.

    Args:
        max_workers: The maximum number of threads that can be used to
            execute the given calls.
        thread_name_prefix: An optional name prefix to give our threads.
    """
    if max_workers is None:
        # Use this number because ThreadPoolExecutor is often
        # used to overlap I/O instead of CPU work.
        max_workers = (os.cpu_count() or 1) * 5
    if max_workers <= 0:
        raise ValueError("max_workers must be greater than 0")
    if not has_app_context():
        raise RuntimeError(APP_CONTEXT_ERROR)

    self._app_ctx = _app_ctx_stack.top
    self._max_workers = max_workers
    self._work_queue = queue.Queue()
    self._threads = set()
    self._shutdown = False
    self._shutdown_lock = threading.Lock()
    self._thread_name_prefix = (thread_name_prefix or
                                ("ThreadPoolExecutor-%d" % self._counter()))
def get_encoder():
    if has_app_context():
        path = current_app.config["PIPE_PATH"]
        if "encoder" not in g:
            pipe = load(path)
            g.encoder = Encoder(pipe)
        return g.encoder
def import_module_from_packages(name, app=None, packages=None, silent=False):
    if packages is None:
        if app is None and has_app_context():
            app = current_app
        if app is None:
            raise Exception(
                'Working outside application context or provide app'
            )
        # FIXME
        packages = app.config.get('PACKAGES', [])

    for package in packages:
        if package.endswith('.*'):
            for module in find_modules(package[:-2], include_packages=True):
                try:
                    yield import_string(module + '.' + name, silent)
                except ImportError:
                    pass
                except Exception as e:
                    import traceback
                    traceback.print_exc()
                    app.logger.error('Could not import: "%s.%s: %s',
                                     module, name, str(e))
            continue
        try:
            yield import_string(package + '.' + name, silent)
        except ImportError:
            pass
        except Exception as e:
            import traceback
            traceback.print_exc()
            app.logger.error('Could not import: "%s.%s: %s',
                             package, name, str(e))
def is_auth_enabled(func_name=None):
    """Is authentication enabled? (at least one method is enabled)

    :param str func_name: (optional, default None)
        When a request context is not found, func_name is used to verify
        whether the current endpoint url is the auth demo or not (apispec
        documentation case).
    """
    # auth enabled, or auth demo enabled and current url is auth demo,
    # and at least one auth method enabled
    if not has_app_context():
        return False
    is_url_demo = False
    auth_demo_endpoint = current_app.config['AUTHENTICATION_DEMO_ENDPOINT']
    if has_request_context():
        is_url_demo = auth_demo_endpoint in request.url
    elif func_name is not None:
        is_url_demo = auth_demo_endpoint[1:].replace('/', '_') in func_name.lower()
    return ((
        current_app.config['AUTHENTICATION_ENABLED']
        or (
            # auth demo case
            current_app.config['AUTHENTICATION_DEMO_ENABLED']
            and is_url_demo)
    ) and (
        # is at least one authentication mode active?
        current_app.config['AUTH_JWT_ENABLED']
        or current_app.config['AUTH_CERTIFICATE_ENABLED']
        or current_app.config['AUTH_SAML_ENABLED']))
def db_refresh(short=False):
    """Refresh the database.

    :param short: Short version
    """
    create_local_context = not has_app_context()
    if create_local_context:
        # When this command is run from the command line, there is no app context, so let's create one
        from dontforget.app import create_app

        app_ = create_app(TestConfig)
        Migrate(app_, db, os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                       "..", "..", "..", "migrations"))
        context = app_.app_context()
        context.push()

    tmp_handlers = current_app.logger.handlers
    current_app.logger.handlers = []
    tmp_stderr = sys.stderr
    if short:
        sys.stderr = None

    db.reflect()
    drop_everything()
    upgrade()

    if short:
        sys.stderr = tmp_stderr
    current_app.logger.handlers = tmp_handlers

    if create_local_context:
        # Remove the context after use
        db.session.remove()
        context.pop()
def register_types(conn):
    if has_app_context():
        # conn is a PooledDB (or PersistentDB) connection; its _con is a SteadyDB
        # connection, and the SteadyDB's _con is the raw psycopg2 connection object.
        real_conn = conn._con._con
    else:
        real_conn = conn
    register_uuid(conn_or_curs=real_conn)
    register_hstore(conn_or_curs=real_conn)
def setup(sphinx):
    """Setup Sphinx object."""
    from flask import has_app_context
    from invenio.base.factory import create_app

    if not has_app_context():
        app = create_app()
        ctx = app.test_request_context("/")
        ctx.push()
def _get_ldap_cache():
    """Returns the cache dictionary for ldap contexts"""
    if not has_app_context():
        return {}
    try:
        return g._multipass_ldap_connections
    except AttributeError:
        g._multipass_ldap_connections = cache = {}
        return cache
def _transaction_ended(session, transaction):
    # The zope transaction system closes the session (and thus the
    # transaction) e.g. when calling `transaction.abort()`.
    # In this case we need to clear the memoization cache to avoid
    # accessing memoized objects (which are now session-less).
    if has_app_context() and 'memoize_cache' in g:
        del g.memoize_cache
    if has_request_context() and hasattr(flask_session, '_user'):
        delattr(flask_session, '_user')
def ldap_context(settings, use_cache=True):
    """Establishes an LDAP session context.

    Establishes a connection to the LDAP server from the `uri` in the
    ``settings`` and makes the context available in ``current_ldap``.

    Yields a namedtuple containing the connection to the server and the
    provider settings.

    :param settings: dict -- The settings for a LDAP provider.
    :param use_cache: bool -- If the connection should be cached.
    """
    try:
        connection = ldap_connect(settings, use_cache=use_cache)
        ldap_ctx = LDAPContext(connection=connection, settings=settings)
        _ldap_ctx_stack.push(ldap_ctx)
        try:
            yield ldap_ctx
        except ldap.LDAPError:
            # If something went wrong we get rid of cached connections.
            # This is mostly for the python shell where you have a very
            # long-living application context that usually results in
            # the ldap connection timing out.
            _clear_ldap_cache()
            raise
        finally:
            assert _ldap_ctx_stack.pop() is ldap_ctx, "Popped wrong LDAP context"
    except ldap.SERVER_DOWN:
        if has_app_context() and current_app.debug:
            raise
        raise MultipassException("The LDAP server is unreachable")
    except ldap.INVALID_CREDENTIALS:
        if has_app_context() and current_app.debug:
            raise
        raise ValueError("Invalid bind credentials")
    except ldap.SIZELIMIT_EXCEEDED:
        raise MultipassException("Size limit exceeded (try setting a smaller page size)")
    except ldap.TIMELIMIT_EXCEEDED:
        raise MultipassException("The time limit for the operation has been exceeded.")
    except ldap.TIMEOUT:
        raise MultipassException("The operation timed out.")
    except ldap.FILTER_ERROR:
        raise ValueError("The filter supplied to the operation is invalid. "
                         "(This is most likely due to a bad user or group filter.)")
def decorated_func(*args, **kwargs):
    """Force this function to run within the application context."""
    if not has_app_context() or new_context:
        with get_application().test_request_context('/'):
            # FIXME we should maybe use app_context() instead
            current_app.preprocess_request()
            result = f(*args, **kwargs)
    else:
        result = f(*args, **kwargs)
    return result
def __init__(self, module_name, app=None, registry_namespace=None,
             with_setup=False, silent=False):
    super(ModuleAutoDiscoveryRegistry, self).__init__(
        module_name, with_setup=with_setup, silent=silent,
        registry_namespace=registry_namespace
    )
    if app is None and has_app_context():
        app = current_app
    self.app = app
    self.discover(app=app)
def _clear_ldap_cache(*args, **kwargs):
    if not has_app_context() or '_multipass_ldap_connections' not in g:
        return
    for conn in itervalues(g._multipass_ldap_connections):
        try:
            conn.unbind_s()
        except ldap.LDAPError:
            # That's ugly but we couldn't care less about a failure while disconnecting
            pass
    del g._multipass_ldap_connections
def setup(sphinx):
    """Setup Sphinx object."""
    from flask import has_app_context
    from invenio_base.factory import create_app

    PACKAGES = ['invenio_accounts', 'invenio_groups', 'invenio_records',
                'invenio_tags', 'invenio_base']

    if not has_app_context():
        app = create_app(PACKAGES=PACKAGES)
        ctx = app.test_request_context('/')
        ctx.push()
def start_db_session():
    """Starts a new db_session if one does not exist"""
    # print('==> Start session')
    if has_db_session():
        return
    if not has_app_context() or not has_request_context():
        raise RuntimeError('You need app_context or request_context')
    db_session.__enter__()
def wrapper(*args, **kwargs):
    resp = f(*args, **kwargs)

    mask = self.mask
    if has_app_context():
        mask_header = current_app.config['RESTPLUS_MASK_HEADER']
        mask = request.headers.get(mask_header) or mask

    if isinstance(resp, tuple):
        data, code, headers = unpack(resp)
        return marshal(data, self.fields, self.envelope, mask), code, headers
    else:
        return marshal(resp, self.fields, self.envelope, mask)
def after_flush(session, _):
    """Handle all ACL hooks after flush."""
    with benchmark("handle ACL hooks after flush"):
        if not flask.has_app_context():
            return

        acl_ids, relationship_ids, deleted = _get_propagation_entries(session)

        _add_or_update("new_acl_ids", acl_ids)
        _add_or_update("new_relationship_ids", relationship_ids)
        _add_or_update("deleted_objects", deleted)
def get_cache(create=False):
    """
    Retrieves the cache from the Flask global object. The create arg
    indicates if a new cache should be created if none exists. If we
    are not in a request context, no cache is created (return None).
    """
    if has_app_context():
        cache = getattr(g, 'cache', None)
        if cache is None and create:
            cache = g.cache = Cache()
        return cache
    else:
        logger.warning("No app context - no cache created")
        return None
def marshal(data, fields, envelope=None):
    """Takes raw data (in the form of a dict, list, object) and a dict of
    fields to output and filters the data based on those fields.

    :param data: the actual object(s) from which the fields are taken from
    :param fields: a dict of whose keys will make up the final serialized
                   response output
    :param envelope: optional key that will be used to envelop the serialized
                     response

    >>> from flask_restplus import fields, marshal
    >>> data = { 'a': 100, 'b': 'foo' }
    >>> mfields = { 'a': fields.Raw }

    >>> marshal(data, mfields)
    OrderedDict([('a', 100)])

    >>> marshal(data, mfields, envelope='data')
    OrderedDict([('data', OrderedDict([('a', 100)]))])
    """
    def make(cls):
        if isinstance(cls, type):
            return cls()
        return cls

    if has_app_context():
        mask_header = current_app.config['RESTPLUS_MASK_HEADER']
        mask = request.headers.get(mask_header)
        if mask:
            fields = apply_mask(fields, mask)

    if isinstance(data, (list, tuple)):
        out = [marshal(d, fields) for d in data]
        if envelope:
            out = OrderedDict([(envelope, out)])
        return out

    items = ((k, marshal(data, v) if isinstance(v, dict)
              else make(v).output(k, data))
             for k, v in fields.items())
    out = OrderedDict(items)
    if envelope:
        out = OrderedDict([(envelope, out)])
    return out
def get_connection():
    if has_app_context():
        connection = getattr(g, '_jsonld_service', None)
        if connection is None:
            service_def = current_app.config.get('SPARQL_ENDPOINT')
            context = current_app.config.get('LD_CONTEXT')
            _make_connection = getattr(g, '_jsonld_make_connection', None)
            if _make_connection is None:
                _make_connection = make_connection
            connection = g._jsonld_service = _make_connection(
                service_def.get('url'),
                username=service_def.get('username'),
                password=service_def.get('password'),
                context=context)
        return connection
    else:
        raise ValueError('No connection can be created')
def send(self, auth_header=None, **data):
    from keg import current_app

    if self.__log_reports__:
        # Report logging is enabled. Store the report in __report_log__
        self.__report_log__.append(data)
        return

    if flask.has_app_context() and current_app.config.get('TESTING'):
        # We are in a test. Don't pass on the report to Sentry.
        return

    # If we are not testing or we cannot determine that because we are not within an
    # app context, pass the report through
    return super(SentryClient, self).send(auth_header=auth_header, **data)  # pragma: no cover
def _get_client(self, credentials=None):
    partial = mendeley.Mendeley(
        client_id=self.client_id,
        client_secret=self.client_secret,
        redirect_uri=web_url_for('oauth_callback', service_name='mendeley', _absolute=True)
        if has_app_context() else None,
    )
    credentials = credentials or {
        'access_token': self.account.oauth_key,
        'refresh_token': self.account.refresh_token,
        'expires_at': time.mktime(self.account.expires_at.timetuple()),
        'token_type': 'bearer',
    }
    return APISession(partial, credentials)
def flask_app(self):
    if has_app_context():
        return flask_current_app._get_current_object()

    self.flask_app_factory = symbol_by_name(self.flask_app_factory)
    app = self.flask_app_factory()

    if 'sentry' in app.extensions:
        from raven.contrib.celery import register_signal, register_logger_signal
        client = app.extensions['sentry'].client
        client.tags['process_type'] = 'celery task'
        register_signal(client)
        register_logger_signal(client)

    register_after_fork(app, self._setup_after_fork)
    return app
def flask_app(self):
    if has_app_context():
        return unwrap(flask_current_app)

    self.flask_app_factory = symbol_by_name(self.flask_app_factory)
    app = self.flask_app_factory()

    if "sentry" in app.extensions:
        from raven.contrib.celery import register_signal, register_logger_signal
        client = app.extensions["sentry"].client
        client.tags["process_type"] = "celery task"
        register_signal(client)
        register_logger_signal(client)

    register_after_fork(app, self._setup_after_fork)
    return app
def get_translation_domain(plugin_name=_use_context):
    """Get the translation domain for the given plugin

    If `plugin_name` is omitted, the plugin will be taken from current_plugin.
    If `plugin_name` is None, the core translation domain ('indico') will be used.
    """
    if plugin_name is None:
        return get_domain()
    else:
        plugin = None
        if has_app_context():
            from indico.core.plugins import plugin_engine
            plugin = plugin_engine.get_plugin(plugin_name) if plugin_name is not _use_context else current_plugin
        if plugin:
            return plugin.translation_domain
        else:
            return get_domain()
def log_sql_query(dbhost, sql, param=None):
    """
    Log SQL query into prefix/var/log/dbquery.log log file.

    In order to enable logging of all SQL queries, please uncomment one line
    in run_sql() above. Useful for fine-level debugging only!
    """
    from invenio.utils.date import convert_datestruct_to_datetext
    from invenio.utils.text import indent_text

    date_of_log = convert_datestruct_to_datetext(time.localtime())
    message = date_of_log + '-->\n'
    message += indent_text('Host:\n' + indent_text(str(dbhost), 2, wrap=True), 2)
    message += indent_text('Query:\n' + indent_text(str(sql), 2, wrap=True), 2)
    message += indent_text('Params:\n' + indent_text(str(param), 2, wrap=True), 2)
    message += '-----------------------------\n\n'
    if has_app_context():
        current_app.logger.info(message)
    else:
        print(message, file=sys.stderr)