def get_connection(backend=None, template_prefix=None, template_suffix=None, fail_silently=False, **kwargs): """Load a templated e-mail backend and return an instance of it. If backend is None (default) settings.TEMPLATED_EMAIL_BACKEND is used. Both fail_silently and other keyword arguments are used in the constructor of the backend. """ # This method is mostly a copy of the backend loader present in django.core.mail.get_connection klass_path = backend or getattr(settings, 'TEMPLATED_EMAIL_BACKEND', TemplateBackend) if isinstance(klass_path, basestring): try: # First check if class name is omited and we have module in settings mod = import_module(klass_path) klass_name = 'TemplateBackend' except ImportError, e: # Fallback to class name try: mod_name, klass_name = klass_path.rsplit('.', 1) mod = import_module(mod_name) except ImportError, e: raise ImproperlyConfigured( ('Error importing templated email backend module %s: "%s"' % (mod_name, e)))
def discover_apps(self):
    """Find and register apphook classes, running at most once per process."""
    if self.discovered:
        return
    if settings.CMS_APPHOOKS:
        # Explicit list of dotted class paths: import and register each one.
        for dotted_path in settings.CMS_APPHOOKS:
            self.block_register = True
            module_path, _, attr_name = dotted_path.rpartition(".")
            hook_cls = getattr(import_module(module_path), attr_name, None)
            if hook_cls is None:
                raise ImproperlyConfigured(
                    "Cannot find class %s" % dotted_path
                )
            self.block_register = False
            self.register(hook_cls)
    else:
        # No explicit list: scan every installed app for a cms_app module,
        # relying on its import side effects for registration.
        for installed in settings.INSTALLED_APPS:
            try:
                import_module('%s.cms_app' % installed)
            except ImportError:
                pass
    self.discovered = True
def test_home_page_displays_only_todos_added(self):
    """home_page must show only the to-dos created by the session user.

    The original duplicated the request/session construction verbatim for
    both users; that setup is factored into ``_request_for``.
    """
    ToDo.objects.create(item='Code unit test', added_by=ADMIN_ID, date_todo=TODAY, archive='0')
    ToDo.objects.create(item='Fix code', added_by=OTHER_ID, date_todo=TODAY, archive='0')

    # The admin sees only the item they added.
    response = home_page(self._request_for(ADMIN_ID, ADMIN_IS_SUPERUSER,
                                           ADMIN_FIRST_NAME, ADMIN_LAST_NAME))
    self.assertIn('Code unit test', response.content.decode())
    self.assertNotIn('Fix code', response.content.decode())

    # The other user sees only theirs.
    response = home_page(self._request_for(OTHER_ID, OTHER_IS_SUPERUSER,
                                           OTHER_FIRST_NAME, OTHER_LAST_NAME))
    self.assertNotIn('Code unit test', response.content.decode())
    self.assertIn('Fix code', response.content.decode())

def _request_for(self, user_id, is_superuser, first_name, last_name):
    """Build an HttpRequest carrying a fresh session for the given user."""
    request = HttpRequest()
    engine = import_module(settings.SESSION_ENGINE)
    request.session = engine.SessionStore(None)
    request.session['id'] = user_id
    request.session['is_superuser'] = is_superuser
    request.session['first_name'] = first_name
    request.session['last_name'] = last_name
    return request
def create_initial_revisions(self, app, model_class, comment, verbosity=2, **kwargs): """Creates the set of initial revisions for the given model.""" # Import the relevant admin module. try: import_module("%s.admin" % app.__name__.rsplit(".", 1)[0]) except ImportError: pass # Check all models for empty revisions. if revision.is_registered(model_class): created_count = 0 # HACK: This join can't be done in the database, due to incompatibilities # between unicode object_ids and integer pks on strict backends like postgres. for obj in model_class._default_manager.iterator(): if Version.objects.get_for_object(obj).count() == 0: try: self.version_save(obj, comment) except: print "ERROR: Could not save initial version for %s %s." % (model_class.__name__, obj.pk) raise created_count += 1 # Print out a message, if feeling verbose. if created_count > 0 and verbosity >= 2: print u"Created %s initial revisions for model %s." % (created_count, model_class._meta.verbose_name) else: if verbosity >= 2: print u"Model %s is not registered." % (model_class._meta.verbose_name)
def setup_environ(dunder_file=None, project_path=None):
    """Configure DJANGO_SETTINGS_MODULE and sys.path for the project.

    Exactly one of dunder_file (a module's ``__file__``) or project_path
    may be given.  The basename of the resolved project path must be the
    project name and must be importable.
    """
    assert not (dunder_file and project_path), ("You must not specify both "
        "__file__ and project_path")
    if dunder_file is not None:
        file_path = os.path.abspath(os.path.dirname(dunder_file))
        deploy_files = [
            "fcgi.py",
            "wsgi.py",
        ]
        # FIX: compare the *basename* against the deploy script names; the
        # original `dunder_file in deploy_files` compared the whole __file__
        # path and only matched when the script was run from its own
        # directory.
        if os.path.basename(dunder_file) in deploy_files:
            project_path = os.path.abspath(os.path.join(file_path, os.pardir))
        else:
            project_path = file_path
    # the basename must be the project name and importable.
    project_name = os.path.basename(project_path)
    # setup Django correctly (the hard-coding of settings is only temporary.
    # carljm's proposal will remove that)
    os.environ["DJANGO_SETTINGS_MODULE"] = "%s.settings" % project_name
    # ensure the importablity of project
    sys.path.append(os.path.join(project_path, os.pardir))
    import_module(project_name)
    sys.path.pop()
    # Pinax adds an app directory for users as a reliable location for
    # Django apps
    sys.path.insert(0, os.path.join(project_path, "apps"))
def autodiscover():
    """
    Import each installed app's object-permission handler module so that
    the import registers any handler bits; apps that simply lack the
    module are skipped silently.
    """
    import copy
    from django.conf import settings
    from django.utils.importlib import import_module
    from django.utils.module_loading import module_has_submodule

    submodule = settings.OBJECT_PERMISSION_HANDLER_MODULE_NAME
    for app_name in settings.INSTALLED_APPS:
        app_module = import_module(app_name)
        try:
            # Snapshot the registry so a failed import can be rolled back.
            registry_snapshot = copy.copy(site._registry)
            import_module('%s.%s' % (app_name, submodule))
        except:
            # Roll back so the import can be retried on the next request
            # without NotRegistered / AlreadyRegistered exceptions.
            site._registry = registry_snapshot
            # Swallow the error only when the handler module does not
            # exist; a broken handler module must bubble up.
            if module_has_submodule(app_module, submodule):
                raise
def get_model(app_label, model_name):
    """
    Fetch a Django model via the app registry without requiring the
    registry to be fully populated.

    Safe to call while ``apps.populate()`` is still loading models: in
    that window the target models module is imported explicitly and the
    model is resolved through the registry's private API, which also
    handles case-insensitive model names.

    Raises LookupError if the model isn't found.
    """
    try:
        return apps.get_model(app_label, model_name)
    except AppRegistryNotReady:
        if not apps.apps_ready or apps.models_ready:
            # Any other readiness state (or the model really doesn't
            # exist): propagate the original exception unchanged.
            raise
        # Emulate `from path.to.app.models import Model` for a registry
        # that is mid-population.  `app_config.import_models()` cannot be
        # used here because it would interfere with `apps.populate()`.
        app_config = apps.get_app_config(app_label)
        import_module('%s.%s' % (app_config.name, MODELS_MODULE_NAME))
        # The private lookup accounts for case-insensitivity of model_name.
        return apps.get_registered_model(app_label, model_name)
def create_initial_revisions(self, app, model_class, verbosity=2, **kwargs):
    """Create a first Version for every instance that has none yet.

    (Logic lifted from management/__init__.py.)
    """
    # Importing the app's admin module triggers revision registration.
    try:
        import_module("%s.admin" % app.__name__.rsplit(".", 1)[0])
    except ImportError:
        pass
    if not revision.is_registered(model_class):
        if verbosity >= 2:
            self.stdout.write(u"Model %s is not registered.\n" % (model_class._meta.verbose_name))
        return
    content_type = ContentType.objects.get_for_model(model_class)
    # HACK: the set difference happens in Python rather than SQL, for
    # potential incompatibilities between unicode object_ids and integer
    # pks on strict backends like postgres.
    already_versioned = frozenset(
        Version.objects.filter(content_type=content_type)
               .values_list("object_id", flat=True).distinct().iterator())
    every_pk = frozenset(
        unicode(pk) for pk in
        model_class._default_manager.values_list("pk", flat=True).iterator())
    missing = every_pk - already_versioned
    # Create the initial revision for each unversioned instance.
    saved = 0
    for instance in model_class._default_manager.filter(pk__in=missing).iterator():
        self.version_save(instance)
        saved += 1
    if saved > 0 and verbosity >= 2:
        self.stdout.write(u"Created %s initial revisions for model %s.\n" % (saved, model_class._meta.verbose_name))
def load_backend(backend_name): try: module = import_module('.base', 'django.db.backends.%s' % backend_name) import warnings warnings.warn( "Short names for DATABASE_ENGINE are deprecated; prepend with 'django.db.backends.'", PendingDeprecationWarning ) return module except ImportError, e: # Look for a fully qualified database backend name try: return import_module('.base', backend_name) except ImportError, e_user: # The database backend wasn't found. Display a helpful error message # listing all possible (built-in) database backends. backend_dir = os.path.join(os.path.dirname(__file__), 'backends') try: available_backends = [f for f in os.listdir(backend_dir) if os.path.isdir(os.path.join(backend_dir, f)) and not f.startswith('.')] except EnvironmentError: available_backends = [] available_backends.sort() if backend_name not in available_backends: error_msg = ("%r isn't an available database backend. \n" + "Try using django.db.backends.XXX, where XXX is one of:\n %s\n" + "Error was: %s") % \ (backend_name, ", ".join(map(repr, available_backends)), e_user) raise ImproperlyConfigured(error_msg) else: raise # If there's some other error, this must be an error in Django itself.
def autodiscover(): """ Auto-discover INSTALLED_APPS backends.py modules that inherit from GenericSocialUserBackend and fail silently when not present. This forces an import on them to register any backend classes. """ from copy import copy from django.conf import settings from django.contrib.admin.sites import site from django.utils.importlib import import_module from django.utils.module_loading import module_has_submodule for app in settings.INSTALLED_APPS: mod = import_module(app) # Attempt to import the app's email module. try: before_import_registry = copy(site._registry) import_module('%s.backends' % app) except Exception, exc: # Reset the model registry to the state before the last import as # this import will have to reoccur on the next request and this # could raise NotRegistered and AlreadyRegistered exceptions # (see #8245). site._registry = before_import_registry # backends exists but import failed, raise the exception. if module_has_submodule(mod, 'backends'): raise Exception( 'Failed to import {0}.backends with error: {1}.'.format( app, exc))
def post_exec_hook(hook):
    """Run a hook function defined in deploy.py modules.

    Looks for *hook* first in the project package's deploy module, then
    in every installed app's deploy module (skipping woven itself), and
    finally in woven.deploy.  Each distinct function runs at most once.
    If the project has no deploy module, nothing at all is run.
    """
    executed = []
    # Project-level deploy module first; a missing one aborts everything.
    try:
        project_deploy = import_module(".".join([env.project_package_name, "deploy"]))
        fn = vars(project_deploy).get(hook)
        if fn:
            fn()
            executed.append(fn)
    except ImportError:
        return
    # Then each installed app's deploy module, best-effort.
    for app_name in env.INSTALLED_APPS:
        if app_name == "woven":
            continue
        try:
            app_deploy = import_module(".".join([app_name, "deploy"]))
            fn = vars(app_deploy).get(hook)
            if fn and fn not in executed:
                fn()
                executed.append(fn)
        except ImportError:
            pass
    # Finally woven's own deploy module.
    import woven.deploy
    fn = vars(woven.deploy).get(hook)
    if fn and fn not in executed:
        fn()
def get_callable(lookup_view, can_fail=False):
    """
    Convert a string version of a function name to the callable object.

    If the lookup_view is not an import path, it is assumed to be a URL
    pattern label and the original string is returned.

    If can_fail is True, lookup_view might be a URL pattern label, so
    errors during the import fail and the string is returned.
    """
    if not callable(lookup_view):
        # Split "path.to.module.view" into module and attribute parts.
        mod_name, func_name = get_mod_func(lookup_view)
        try:
            if func_name != "":
                lookup_view = getattr(import_module(mod_name), func_name)
                if not callable(lookup_view):
                    raise ViewDoesNotExist("Could not import %s.%s. View is not callable." % (mod_name, func_name))
        except AttributeError:
            # Module imported, but the attribute is missing.
            if not can_fail:
                raise ViewDoesNotExist("Could not import %s. View does not exist in module %s." % (lookup_view, mod_name))
        except ImportError:
            # Distinguish "parent module missing" from "submodule broken":
            # only the former gets the friendlier ViewDoesNotExist message.
            parentmod, submod = get_mod_func(mod_name)
            if not can_fail and submod != "" and not module_has_submodule(import_module(parentmod), submod):
                raise ViewDoesNotExist("Could not import %s. Parent module %s does not exist." % (lookup_view, mod_name))
            if not can_fail:
                raise
    # Either the resolved callable or, for pattern labels, the original string.
    return lookup_view
def view_index(request):
    """Render the admin-doc page listing every view in each site's URLconf."""
    if not utils.docutils_is_available:
        return missing_docutils_page(request)

    # One settings module per administered site, or just our own.
    if settings.ADMIN_FOR:
        settings_modules = [import_module(m) for m in settings.ADMIN_FOR]
    else:
        settings_modules = [settings]

    views = []
    for conf in settings_modules:
        urlconf = import_module(conf.ROOT_URLCONF)
        discovered = extract_views_from_urlpatterns(urlconf.urlpatterns)
        if Site._meta.installed:
            site_obj = Site.objects.get(pk=conf.SITE_ID)
        else:
            site_obj = GenericSite()
        for func, regex in discovered:
            views.append({
                'name': func.__name__,
                'module': func.__module__,
                'site_id': conf.SITE_ID,
                'site': site_obj,
                'url': simplify_regex(regex),
            })
    return render_to_response('admin_doc/view_index.html', {
        'root_path': get_root_path(),
        'views': views,
    }, context_instance=RequestContext(request))
def autodiscover():
    """
    Auto-discover INSTALLED_APPS admin.py modules and fail silently when
    not present. This forces an import on them to register any admin bits
    they may want.
    """
    import copy
    from django.conf import settings
    from django.utils.importlib import import_module
    from django.utils.module_loading import module_has_submodule

    for app_name in settings.INSTALLED_APPS:
        app_module = import_module(app_name)
        try:
            # Snapshot the registry so a failed import can be undone.
            registry_before = copy.copy(site._registry)
            import_module('%s.admin' % app_name)
        except:
            # Restore pre-import state: the import will reoccur on the next
            # request, and stale entries would trigger NotRegistered /
            # AlreadyRegistered exceptions (see #8245).
            site._registry = registry_before
            # Swallow the error only when the app simply has no admin
            # module; a broken admin module must bubble up.
            if module_has_submodule(app_module, 'admin'):
                raise
def _autodiscover(registry):
    """See documentation for autodiscover (without the underscore).

    Imports each installed app's autocomplete_light_registry module; on
    failure the shared *registry* is restored to its pre-import state.
    """
    import copy
    from django.conf import settings
    from django.utils.importlib import import_module
    from django.utils.module_loading import module_has_submodule

    for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        # Attempt to import the app's registry module.
        try:
            before_import_registry = copy.copy(registry)
            import_module('%s.autocomplete_light_registry' % app)
        except:
            # FIX: the original rebound the *local* name
            # (`registry = before_import_registry`), which cannot restore
            # the caller's object.  Mutate the shared registry in place so
            # the rollback is actually visible to the caller.
            registry.clear()
            registry.update(before_import_registry)
            # Decide whether to bubble up this error. If the app just
            # doesn't have the module, ignore the error; otherwise
            # re-raise it.
            if module_has_submodule(mod, 'autocomplete_light_registry'):
                raise
def collect_indexes(self):
    """Import every installed app's search_indexes module and return the
    instantiated index classes, honouring the exclusion lists."""
    indexes = []
    for app in settings.INSTALLED_APPS:
        mod = importlib.import_module(app)
        try:
            search_index_module = importlib.import_module("%s.search_indexes" % app)
        except ImportError:
            # Re-raise only when the submodule exists but is broken;
            # apps without one are skipped silently.
            if module_has_submodule(mod, "search_indexes"):
                raise
            continue
        for item_name, item in inspect.getmembers(search_index_module, inspect.isclass):
            # An index class advertises itself via these two attributes.
            if getattr(item, "haystack_use_for_indexing", False) and getattr(item, "get_model", None):
                # We've got an index. Check if we should be ignoring it.
                class_path = "%s.search_indexes.%s" % (app, item_name)
                if class_path in self.excluded_indexes or self.excluded_indexes_ids.get(item_name) == id(item):
                    # NOTE(review): recording id(item) presumably de-dupes a
                    # class object reachable under two app paths -- confirm
                    # the intended semantics before relying on it.
                    self.excluded_indexes_ids[str(item_name)] = id(item)
                    continue
                indexes.append(item())
    return indexes
def _urls(self):
    """Constructs the URLconf for Horizon from registered Dashboards.

    Returns the (urlpatterns, namespace, slug) triple expected by
    django.conf.urls.include.
    """
    urlpatterns = self._get_default_urlpatterns()
    self._autodiscover()
    # Discover each dashboard's panels.
    for dash in self._registry.values():
        dash._autodiscover()
    # Load the plugin-based panel configuration
    self._load_panel_customization()
    # Allow for override modules
    if self._conf.get("customization_module", None):
        customization_module = self._conf["customization_module"]
        # Split "package.path.mod" into its package and final module name.
        bits = customization_module.split('.')
        mod_name = bits.pop()
        package = '.'.join(bits)
        mod = import_module(package)
        try:
            # Snapshot the registry so a failed customization import can
            # be rolled back.
            before_import_registry = copy.copy(self._registry)
            import_module('%s.%s' % (package, mod_name))
        except Exception:
            self._registry = before_import_registry
            # Only re-raise when the module exists but failed to import.
            if module_has_submodule(mod, mod_name):
                raise
    # Compile the dynamic urlconf.
    for dash in self._registry.values():
        urlpatterns += patterns('', url(r'^%s/' % dash.slug, include(dash._decorated_urls)))
    # Return the three arguments to django.conf.urls.include
    return urlpatterns, self.namespace, self.slug
def load_backend(backend_name=None): if not backend_name: backend_name = settings.HAYSTACK_SEARCH_ENGINE try: # Most of the time, the search backend will be one of the # backends that ships with haystack, so look there first. return importlib.import_module('haystack.backends.%s_backend' % backend_name) except ImportError, e: # If the import failed, we might be looking for a search backend # distributed external to haystack. So we'll try that next. try: return importlib.import_module('%s_backend' % backend_name) except ImportError, e_user: # The search backend wasn't found. Display a helpful error message # listing all possible (built-in) database backends. backend_dir = os.path.join(__path__[0], 'backends') available_backends = [ os.path.splitext(f)[0].split("_backend")[0] for f in os.listdir(backend_dir) if f != "base.py" and not f.startswith('_') and not f.startswith('.') and not f.endswith('.pyc') ] available_backends.sort() if backend_name not in available_backends: raise ImproperlyConfigured, "%r isn't an available search backend. Available options are: %s" % \ (backend_name, ", ".join(map(repr, available_backends))) else: raise # If there's some other error, this must be an error in Django itself.
def handle_noargs(self, **options):
    """Command entry point: read options, then import each installed
    app's management module so dispatcher handlers register first."""
    verbosity = int(options.get('verbosity', 1))
    interactive = options.get('interactive')
    show_traceback = options.get('traceback', False)
    # Stealth option -- 'load_initial_data' is used by the testing setup
    # process to disable initial fixture loading.
    load_initial_data = options.get('load_initial_data', True)
    self.style = no_style()
    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    for app_name in settings.INSTALLED_APPS:
        try:
            import_module('.management', app_name)
        except ImportError, exc:
            # This is slightly hackish. We want to ignore ImportErrors
            # if the "management" module itself is missing -- but we don't
            # want to ignore the exception if the management module exists
            # but raises an ImportError for some reason. The only way we
            # can do this is to check the text of the exception. Note that
            # we're a bit broad in how we check the text, because different
            # Python implementations may not use the same text.
            # CPython uses the text "No module named management"
            # PyPy uses "No module named myproject.myapp.management"
            msg = exc.args[0]
            if not msg.startswith('No module named') or 'management' not in msg:
                raise
def _load_templates():
    """Load template modules defined under settings.TCMS_PAGES.

    Walks the TCMS_PAGES package tree and collects every module exposing a
    ``PAGE`` attribute into a dict keyed by its dotted sub-path (or bare
    module name at the package root).

    NOTE(review): the original docstring promised a (dict, choices-list)
    tuple, but only the dict is built and returned -- confirm callers.
    """
    pages_mod = import_module(settings.TCMS_PAGES)
    entries, root_dir = {}, dirname(pages_mod.__file__)
    for path, subdirs, files in walk(root_dir):
        # Dotted package name of this directory relative to TCMS_PAGES.
        name = path.replace(root_dir, '').strip(sep).replace(sep, '.')
        for file_name in filter(lambda f: f.endswith('.py'), files):
            # FIX: strip only the trailing ".py" (the endswith filter
            # guarantees it); str.replace('.py', '') would also delete
            # ".py" occurring in the middle of a module name.  Also renamed
            # the loop variable so it no longer shadows the `file` builtin.
            fname = file_name[:-3]
            import_name = filter(None, (settings.TCMS_PAGES, name, fname))
            try:
                mod = import_module('.'.join(import_name))
                if hasattr(mod, 'PAGE'):
                    entries[name or fname] = mod.PAGE
            except (ImportError, AttributeError):
                pass
    return entries
def autodiscover(module_name=None):
    """
    Auto-discover INSTALLED_APPS permissions.py modules and fail silently
    when not present. This forces an import on them to register any
    permissions bits they may want.
    """
    import copy
    from django.conf import settings
    from django.utils.importlib import import_module
    from django.utils.module_loading import module_has_submodule
    from permission.handlers import registry

    submodule = module_name or settings.PERMISSION_MODULE_NAME
    for app_name in settings.INSTALLED_APPS:
        app_module = import_module(app_name)
        # Attempt to import the app's permissions module.
        try:
            registry_snapshot = copy.copy(registry._registry)
            import_module('%s.%s' % (app_name, submodule))
        except:
            # Roll the registry back: the import reoccurs on the next
            # request, and stale state would raise NotRegistered /
            # AlreadyRegistered exceptions (see #8254).
            registry._registry = registry_snapshot
            # Swallow the error only when the app simply has no
            # permissions module; otherwise let it bubble up.
            if module_has_submodule(app_module, submodule):
                raise
def import_project():
    '''
    Import moderator from project root and register all models it
    contains with moderation.

    The project root file allows you to add moderation for models that
    are in libraries outside the project.  Returns the imported module,
    or None when the project root is not a package or has no moderator
    module.
    '''
    from django.conf import settings
    from django.utils.importlib import import_module
    import imp

    # FIX: dropped the unused `import sys` the original carried.
    project_root = settings.ROOT_URLCONF.split(".")[0]
    try:
        app_path = import_module(project_root).__path__
    except AttributeError:
        # Project root is a plain module, not a package: nothing to scan.
        return None
    try:
        imp.find_module('moderator', app_path)
    except ImportError:
        return None
    module = import_module("%s.moderator" % project_root)
    return module
def get_backends():
    """Walk every social-auth import source and return the dict of
    enabled backend classes keyed by backend name."""
    discovered = {}
    enabled = _setting('SOCIAL_AUTH_ENABLED_BACKENDS')
    for source in SOCIAL_AUTH_IMPORT_SOURCES:
        try:
            package = import_module(source)
        except ImportError:
            logger.exception('Error importing %s', source)
            continue
        for directory, subdir, files in walk(package.__path__[0]):
            for file_name in files:
                if not file_name.endswith('.py'):
                    continue
                try:
                    module_name = basename(file_name).replace('.py', '')
                    sub = import_module(source + '.' + module_name)
                    # register only enabled backends
                    discovered.update(
                        (key, val) for key, val in sub.BACKENDS.items()
                        if val.enabled() and
                           (not enabled or key in enabled))
                except (ImportError, AttributeError):
                    pass
    return discovered
def wormhole_autodiscover():
    """Import every installed app's ajax module for its registration
    side effects; apps without one are skipped silently."""
    for app in settings.INSTALLED_APPS:
        try:
            import_module('%s.ajax' % app)
        except ImportError:
            # no app.ajax, so just fail silently
            # FIX: was a bare `except:` that also hid non-import errors
            # raised *inside* an existing ajax module; those now propagate.
            pass
def get_backends():
    """
    Get all wanted available backends (inspired by django-social-auth).

    Returns a (backends-by-key, backends-by-auth-backend) tuple.
    """
    by_key = {}
    by_auth = {}
    enabled = getattr(settings, 'CORE_ENABLED_BACKENDS', ('github',))
    package_name = 'core.backends'
    package = import_module('core.backends')
    for directory, subdir, files in walk(package.__path__[0]):
        for file_name in files:
            # Skip private modules and anything that isn't python source.
            if not file_name.endswith('.py') or file_name.startswith('_'):
                continue
            try:
                module_name = basename(file_name).replace('.py', '')
                sub = import_module(package_name + '.' + module_name)
                # register only enabled backends
                found = dict(
                    (key, val) for key, val in sub.BACKENDS.items()
                    if val.name and val.enabled() and
                       (not enabled or key in enabled))
                by_key.update(found)
                by_auth.update(
                    (backend.auth_backend, backend)
                    for backend in found.values() if backend.auth_backend)
            except (ImportError, AttributeError):
                pass
    return by_key, by_auth
def setup_environ(dunder_file=None, project_path=None, relative_project_path=None, settings_path=None):
    """Configure DJANGO_SETTINGS_MODULE and sys.path for the project.

    Exactly one of dunder_file (a module's ``__file__``) or project_path
    may be given; relative_project_path (a path-component sequence) is
    resolved against dunder_file's directory.  settings_path overrides
    the derived "<project>.settings" value; otherwise an existing
    DJANGO_SETTINGS_MODULE environment variable is respected.
    """
    assert not (dunder_file and project_path), ("You must not specify both "
        "__file__ and project_path")
    if dunder_file is not None:
        file_path = os.path.abspath(os.path.dirname(dunder_file))
        if relative_project_path is not None:
            project_path = os.path.abspath(os.path.join(file_path, *relative_project_path))
        else:
            project_path = file_path
    # the basename must be the project name and importable.
    project_name = os.path.basename(project_path)
    # setup Django correctly (the hard-coding of settings is only temporary.
    # carljm's proposal will remove that)
    if settings_path is None:
        if "DJANGO_SETTINGS_MODULE" not in os.environ:
            os.environ["DJANGO_SETTINGS_MODULE"] = "%s.settings" % project_name
    else:
        os.environ["DJANGO_SETTINGS_MODULE"] = settings_path
    # ensure the importablity of project
    sys.path.append(os.path.join(project_path, os.pardir))
    import_module(project_name)
    sys.path.pop()
    # FIX: os.path.join with a single argument is a no-op; use the path
    # directly.
    sys.path.insert(0, project_path)
    sys.path.insert(0, os.path.join(project_path, "apps"))
def handle(self, dash_name=None, **options):
    """Scaffold a new dashboard package called *dash_name*."""
    if dash_name is None:
        raise CommandError("You must provide a dashboard name.")
    # Fall back to the bundled template when none was specified.
    if not options.get("template", None):
        options["template"] = self.template
    # We ship html/js/css template files alongside the python ones.
    options["extensions"].extend(["tmpl", "html", "js", "css"])
    # Refuse names that already resolve to an importable python module.
    try:
        import_module(dash_name)
    except ImportError:
        pass
    else:
        raise CommandError("%r conflicts with the name of an existing "
                           "Python module and cannot be used as an app "
                           "name. Please try another name." % dash_name)
    super(Command, self).handle('dash', dash_name, **options)
    target = options.pop("target", None)
    if not target:
        target = os.path.join(os.curdir, dash_name)
    # Strip the ".tmpl" suffix from the generated python files.
    for tmpl_path in glob.glob(os.path.join(target, "*.py.tmpl")):
        os.rename(tmpl_path, tmpl_path[:-5])
def autodiscover():
    """
    Auto-discover INSTALLED_APPS report.py modules and fail silently when not present.
    Borrowed form django.contrib.admin
    """
    from django.utils.importlib import import_module
    from django.utils.module_loading import module_has_submodule

    global reports
    for app_name in settings.INSTALLED_APPS:
        app_module = import_module(app_name)
        try:
            # Snapshot so a failed import leaves the registry untouched.
            snapshot = copy.copy(reports)
            import_module('%s.reports' % app_name)
        except:
            # Roll back: the import reoccurs on the next request and stale
            # state would raise NotRegistered / AlreadyRegistered
            # exceptions (see #8245).
            reports = snapshot
            # Silently skip apps that simply have no reports module; a
            # broken one must bubble up.
            if module_has_submodule(app_module, 'reports'):
                raise
def load_app(self, app_name, can_postpone=False):
    """
    Loads the app with the provided fully qualified name, and returns the
    model module (or None when the app has no models module, or when the
    import was postponed).
    """
    # Mark the app handled up front so recursive loads don't revisit it.
    self.handled.add(app_name)
    self.nesting_level += 1
    app_module = import_module(app_name)
    try:
        models = import_module('%s.models' % app_name)
    except ImportError:
        self.nesting_level -= 1
        # If the app doesn't have a models module, we can just ignore the
        # ImportError and return no models for it.
        if not module_has_submodule(app_module, 'models'):
            return None
        # But if the app does have a models module, we need to figure out
        # whether to suppress or propagate the error. If can_postpone is
        # True then it may be that the package is still being imported by
        # Python and the models module isn't available yet. So we add the
        # app to the postponed list and we'll try it again after all the
        # recursion has finished (in populate). If can_postpone is False
        # then it's time to raise the ImportError.
        else:
            if can_postpone:
                self.postponed.append(app_name)
                return None
            else:
                raise
    self.nesting_level -= 1
    # First sighting of this models module: record it; insertion order in
    # app_store doubles as the app ordering.
    if models not in self.app_store:
        self.app_store[models] = len(self.app_store)
        self.app_labels[self._label_for(models)] = models
    return models
def dajaxice_autodiscover():
    """
    Auto-discover INSTALLED_APPS ajax.py modules and fail silently when
    not present.
    NOTE: dajaxice_autodiscover was inspired/copied from
    django.contrib.admin autodiscover
    """
    global LOADING_DAJAXICE
    if LOADING_DAJAXICE:
        return
    LOADING_DAJAXICE = True
    try:
        import imp
        from django.conf import settings

        for app in settings.INSTALLED_APPS:
            try:
                app_path = import_module(app).__path__
            except AttributeError:
                # Plain module, not a package: nothing to discover.
                continue
            try:
                imp.find_module('ajax', app_path)
            except ImportError:
                continue
            import_module("%s.ajax" % app)
    finally:
        # FIX: reset the re-entrancy guard even when an ajax module raises;
        # previously an exception left the flag stuck at True, silently
        # disabling all later discovery attempts.
        LOADING_DAJAXICE = False
def handle(self, *args, **kw):
    """Run model inspection over every installed app's models module."""
    for app_name in settings.INSTALLED_APPS:
        self.inspect_models(import_module("%s.models" % app_name))
# Keep everything after the first dot of the dotted settings path.
settingmodule = settingmodule.split('.', 1)[1]

import posixpath
import re
import sys

from cef import log_cef as _log_cef
import MySQLdb as mysql
import sqlalchemy.pool as pool
from django.core.management import setup_environ

import commonware.log
from django.utils import importlib

# Resolve and activate the requested settings module by name.
settings = importlib.import_module(settingmodule)
# Pyflakes will complain about these, but they are required for setup.
setup_environ(settings)
from lib.log_settings_base import formatters, handlers, loggers

# Ugh. But this avoids any zamboni or django imports at all.
# Perhaps we can import these without any problems and we can
# remove all this.
from constants.applications import APPS_ALL
from constants.platforms import PLATFORMS
from constants.base import (ADDON_PREMIUM, STATUS_PUBLIC, STATUS_DISABLED,
                            STATUS_BETA, STATUS_LITE,
                            STATUS_LITE_AND_NOMINATED)
from constants.payments import (CONTRIB_CHARGEBACK, CONTRIB_PURCHASE,
                                CONTRIB_REFUND)
def get_request(*args, **kwargs):
    """Return a DummyRequest wired up with a fresh, empty session."""
    req = DummyRequest(*args, **kwargs)
    session_engine = import_module(settings.SESSION_ENGINE)
    req.session = session_engine.SessionStore(None)
    return req
################################################################################ # Responses from django.core.servers.basehttp import FileWrapper from rest_framework import status from django.http import HttpResponse from django.http.response import HttpResponseBadRequest from django.shortcuts import redirect # Requests import requests from oai_pmh.forms import RequestForm import json import os from django.utils.importlib import import_module settings_file = os.environ.get("DJANGO_SETTINGS_MODULE") settings = import_module(settings_file) OAI_HOST_URI = settings.OAI_HOST_URI OAI_USER = settings.OAI_USER OAI_PASS = settings.OAI_PASS from django.template import RequestContext, loader from mgi.models import XML2Download import datetime from mgi.models import OaiSet, OaiMetadataFormat from django.contrib.auth.decorators import login_required from django.conf import settings import lxml.etree as etree import os from StringIO import StringIO from django.core.urlresolvers import reverse
def get_cls(renderer_name):
    """Resolve a dotted ``package.module.ClassName`` path to the class."""
    module_path, class_name = renderer_name.rsplit('.', 1)
    return getattr(importlib.import_module(module_path), class_name)
def isinst(value, class_str):
    """Return True if *value* is an instance of the class named by the
    dotted path *class_str* (e.g. ``"collections.OrderedDict"``).

    The module part is imported on every call; ImportError /
    AttributeError propagate when the path does not resolve.
    """
    # rsplit replaces the original's split-then-rejoin round trip.
    module_path, class_name = class_str.rsplit('.', 1)
    return isinstance(value, getattr(import_module(module_path), class_name))
def test_shib_login(self):
    """
    Exercise shib_login across the matrix of IdP x REMOTE_USER cases:

    * credentials matching an ExternalAuthMap with a linked active user
      log the user in
    * credentials matching an ExternalAuthMap with a linked inactive user
      show an error page
    * credentials matching an ExternalAuthMap without a linked user, whose
      email matches an existing user with no ExternalAuthMap, link the two
      and log the user in
    * credentials matching an ExternalAuthMap without a linked user, whose
      email matches a user that already has an ExternalAuthMap, cause a 403
    * credentials matching no ExternalAuthMap show the registration form
    """
    # Fixture: an active user already linked to a Stanford-IdP auth map.
    user_w_map = UserFactory.create(email='*****@*****.**')
    extauth = ExternalAuthMap(
        external_id='*****@*****.**',
        external_email='',
        external_domain='shib:https://idp.stanford.edu/',
        external_credentials="",
        user=user_w_map)
    # Fixture: an existing user with NO auth map (linking case).
    user_wo_map = UserFactory.create(email='*****@*****.**')
    user_w_map.save()
    user_wo_map.save()
    extauth.save()
    # Fixture: an inactive user linked to its own auth map (403 case).
    inactive_user = UserFactory.create(email='*****@*****.**')
    inactive_user.is_active = False
    inactive_extauth = ExternalAuthMap(
        external_id='*****@*****.**',
        external_email='',
        external_domain='shib:https://idp.stanford.edu/',
        external_credentials="",
        user=inactive_user)
    inactive_user.save()
    inactive_extauth.save()
    idps = ['https://idp.stanford.edu/', 'https://someother.idp.com/']
    remote_users = [
        '*****@*****.**', '*****@*****.**',
        'testuser2@someother_idp.com', '*****@*****.**'
    ]
    # Drive every (idp, remote_user) combination through the view and
    # branch on the expected outcome below.
    for idp in idps:
        for remote_user in remote_users:
            request = self.request_factory.get('/shib-login')
            request.session = import_module(
                settings.SESSION_ENGINE).SessionStore()  # empty session
            request.META.update({
                'Shib-Identity-Provider': idp,
                'REMOTE_USER': remote_user,
                'mail': remote_user
            })
            request.user = AnonymousUser()
            # Patch the audit logger so each iteration inspects only its
            # own logging calls.
            with patch('external_auth.views.AUDIT_LOG') as mock_audit_log:
                response = shib_login(request)
            audit_log_calls = mock_audit_log.method_calls
            if idp == "https://idp.stanford.edu/" and remote_user == '*****@*****.**':
                # Linked active user: logged in and redirected home.
                self.assertIsInstance(response, HttpResponseRedirect)
                self.assertEqual(request.user, user_w_map)
                self.assertEqual(response['Location'], '/')
                # verify logging:
                self.assertEquals(len(audit_log_calls), 2)
                self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
                method_name, args, _kwargs = audit_log_calls[1]
                self.assertEquals(method_name, 'info')
                self.assertEquals(len(args), 1)
                self.assertIn(u'Login success', args[0])
                self.assertIn(remote_user, args[0])
            elif idp == "https://idp.stanford.edu/" and remote_user == '*****@*****.**':
                # Linked but inactive user: 403 with activation hint.
                self.assertEqual(response.status_code, 403)
                self.assertIn(
                    "Account not yet activated: please look for link in your email",
                    response.content)
                # verify logging:
                self.assertEquals(len(audit_log_calls), 2)
                self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
                method_name, args, _kwargs = audit_log_calls[1]
                self.assertEquals(method_name, 'warning')
                self.assertEquals(len(args), 1)
                self.assertIn(u'is not active after external login', args[0])
                # self.assertEquals(remote_user, args[1])
            elif idp == "https://idp.stanford.edu/" and remote_user == '*****@*****.**':
                # Email matches an unlinked user: a map is created and the
                # user is logged in.
                self.assertIsNotNone(
                    ExternalAuthMap.objects.get(user=user_wo_map))
                self.assertIsInstance(response, HttpResponseRedirect)
                self.assertEqual(request.user, user_wo_map)
                self.assertEqual(response['Location'], '/')
                # verify logging:
                self.assertEquals(len(audit_log_calls), 2)
                self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
                method_name, args, _kwargs = audit_log_calls[1]
                self.assertEquals(method_name, 'info')
                self.assertEquals(len(args), 1)
                self.assertIn(u'Login success', args[0])
                self.assertIn(remote_user, args[0])
            elif idp == "https://someother.idp.com/" and remote_user in \
                    ['*****@*****.**', '*****@*****.**', '*****@*****.**']:
                # Email collides with an already-mapped account: 403.
                self.assertEqual(response.status_code, 403)
                self.assertIn(
                    "You have already created an account using an external login",
                    response.content)
                # no audit logging calls
                self.assertEquals(len(audit_log_calls), 0)
            else:
                # Unknown credentials: registration form is rendered.
                self.assertEqual(response.status_code, 200)
                self.assertContains(
                    response,
                    ("Preferences for {platform_name}".format(
                        platform_name=settings.PLATFORM_NAME)))
                # no audit logging calls
                self.assertEquals(len(audit_log_calls), 0)
def ready(self):
    # Imported purely for side effects: loading the module registers the
    # app's signal receivers once the app registry is ready.
    import_module("synoptico.receivers")
def handle_noargs(self, migrate_all=False, **options):
    """Run syncdb only for apps without South migrations, then migrate.

    With ``migrate_all=True`` every app is treated as needing a plain
    syncdb. Temporarily swaps ``settings.INSTALLED_APPS`` and the model
    cache's app store so Django's syncdb only sees the non-migrated apps.
    """
    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    # This is copied from Django, to fix bug #511.
    try:
        from django.utils.importlib import import_module
    except ImportError:
        pass  # TODO: Remove, only for Django1.0
    else:
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError as exc:
                # Only swallow the "no management module" case; anything
                # else is a real error inside the app's management package.
                msg = exc.args[0]
                if not msg.startswith('No module named') or 'management' not in msg:
                    raise
    # Work out what uses migrations and so doesn't need syncing
    apps_needing_sync = []
    apps_migrated = []
    for app in models.get_apps():
        app_label = get_app_label(app)
        if migrate_all:
            apps_needing_sync.append(app_label)
        else:
            try:
                migrations = migration.Migrations(app_label)
            except NoMigrations:
                # It needs syncing
                apps_needing_sync.append(app_label)
            else:
                # This is a migrated app, leave it
                apps_migrated.append(app_label)
    verbosity = int(options.get('verbosity', 0))
    # Run syncdb on only the ones needed
    if verbosity:
        print("Syncing...")
    # Temporarily restrict both INSTALLED_APPS and the app-store cache to
    # the apps needing a plain syncdb; restored below.
    old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync
    old_app_store, cache.app_store = cache.app_store, SortedDict([
        (k, v) for (k, v) in cache.app_store.items()
        if get_app_label(k) in apps_needing_sync
    ])
    # This will allow the setting of the MySQL storage engine, for example.
    for db in dbs.values():
        db.connection_init()
    # OK, run the actual syncdb
    syncdb.Command().execute(**options)
    settings.INSTALLED_APPS = old_installed
    cache.app_store = old_app_store
    # Migrate if needed
    if options.get('migrate', True):
        if verbosity:
            print("Migrating...")
        # convert from store_true to store_false
        options['no_initial_data'] = not options.get('load_initial_data', True)
        management.call_command('migrate', **options)
    # Be obvious about what we did
    if verbosity:
        print("\nSynced:\n > %s" % "\n > ".join(apps_needing_sync))
    if options.get('migrate', True):
        if verbosity:
            print("\nMigrated:\n - %s" % "\n - ".join(apps_migrated))
    else:
        if verbosity:
            print("\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated))
            print("(use ./manage.py migrate to migrate these)")
# Make sure Django settings resolve before anything touches django.conf.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fileupload_example.settings")
if __name__ == '__main__':
    # NOTE(review): args is hard-coded to None, so the host:port override
    # branch below is dead code -- confirm whether CLI parsing was removed.
    args = None
    HOST = '127.0.0.1'
    PORT = 9999
    if args:
        host_port = args[0]
        HOST = host_port.split(':')[0]
        PORT = host_port.split(':')[1]
    # Build one SockJS router per configured (dotted class path, channel)
    # pair from settings.SOCKJS_CLASSES.
    routers = []
    for sockjs_class in settings.SOCKJS_CLASSES:
        module_name, cls_name = sockjs_class[0].rsplit('.', 1)
        module = import_module(module_name)
        cls = getattr(module, cls_name)
        channel = sockjs_class[1]
        routers.append(SockJSRouter(cls, channel))
        print('Channel {}'.format(channel))
    app_settings = {
        'debug': settings.DEBUG,
    }
    # Gather the app's URL routes, then append each router's SockJS URLs
    # and the generated-settings endpoint.
    urls = discover_routes()
    for router in routers:
        urls += router.urls
    urls.append(('/settings.js$', SettingsHandler))
    load_field_deserializers()
def process_request(self, request):
    """Attach a session store, keyed by the session cookie, to the request."""
    cookie_key = request.COOKIES.get(settings.SESSION_COOKIE_NAME, None)
    session_engine = import_module(settings.SESSION_ENGINE)
    request.session = session_engine.SessionStore(cookie_key)
def handle_noargs(self, **options):
    """Flush the database: truncate every Django-managed table after an
    optional interactive confirmation, then re-emit post-syncdb signals and
    reload the ``initial_data`` fixture."""
    database = options.get('database')
    connection = connections[database]
    verbosity = int(options.get('verbosity'))
    interactive = options.get('interactive')
    # The following are stealth options used by Django's internals.
    reset_sequences = options.get('reset_sequences', True)
    allow_cascade = options.get('allow_cascade', False)
    inhibit_post_syncdb = options.get('inhibit_post_syncdb', False)
    self.style = no_style()
    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    for app_name in settings.INSTALLED_APPS:
        try:
            import_module('.management', app_name)
        except ImportError:
            # Apps without a management module are fine.
            pass
    sql_list = sql_flush(self.style, connection, only_django=True,
                         reset_sequences=reset_sequences,
                         allow_cascade=allow_cascade)
    if interactive:
        confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
    else:
        confirm = 'yes'
    if confirm == 'yes':
        try:
            # Wrap the whole flush in one transaction when the backend can
            # roll back DDL; otherwise run without a savepoint.
            with transaction.atomic(
                    using=database,
                    savepoint=connection.features.can_rollback_ddl):
                cursor = connection.cursor()
                for sql in sql_list:
                    cursor.execute(sql)
        except Exception as e:
            new_msg = (
                "Database %s couldn't be flushed. Possible reasons:\n"
                " * The database isn't running or isn't configured correctly.\n"
                " * At least one of the expected database tables doesn't exist.\n"
                " * The SQL was invalid.\n"
                "Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.\n"
                "The full error: %s") % (connection.settings_dict['NAME'], e)
            # Re-raise as CommandError while preserving the traceback.
            six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])
        if not inhibit_post_syncdb:
            self.emit_post_syncdb(verbosity, interactive, database)
        # Reinstall the initial_data fixture.
        if options.get('load_initial_data'):
            # Reinstall the initial_data fixture.
            call_command('loaddata', 'initial_data', **options)
    else:
        self.stdout.write("Flush cancelled.\n")
# Auth keys url(r'^keys', include('authkeys.urls')), ) users_patterns = patterns( '', url(r'^signup/?$', account_views.signup, name='account_signup'), url(r'^signin/?$', account_views.login, name='account_login'), url(r'^signout/?$', account_views.logout, name='account_logout'), url(r'^account/', include(account_patterns)), url(r'^ban/(?P<user_id>\d+)$', views.ban_user, name='users.ban_user'), ) for provider in providers.registry.get_list(): try: prov_mod = importlib.import_module(provider.package + '.urls') except ImportError: continue prov_urlpatterns = getattr(prov_mod, 'urlpatterns', None) if prov_urlpatterns: users_patterns += prov_urlpatterns urlpatterns = patterns( '', url(r'^profiles/(?P<username>[^/]+)/?$', views.profile_view, name='users.profile'), url(r'^profiles/(?P<username>[^/]+)/roles$', user_roles, name='users.roles'), url(r'^profiles/(?P<username>[^/]+)/edit$',
def __init__(self, settings_module):
    """Load defaults from ``global_settings``, overlay the user's settings
    module, validate a few critical values, and configure logging."""
    # update this dict from global settings (but only for ALL_CAPS settings)
    for setting in dir(global_settings):
        if setting == setting.upper():
            setattr(self, setting, getattr(global_settings, setting))
    # store the settings module in case someone later cares
    self.SETTINGS_MODULE = settings_module
    try:
        mod = importlib.import_module(self.SETTINGS_MODULE)
    except ImportError as e:
        raise ImportError(
            "Could not import settings '%s' (Is it on sys.path?): %s" %
            (self.SETTINGS_MODULE, e))
    # Settings that should be converted into tuples if they're mistakenly entered
    # as strings.
    tuple_settings = ("INSTALLED_APPS", "TEMPLATE_DIRS")
    for setting in dir(mod):
        if setting == setting.upper():
            setting_value = getattr(mod, setting)
            if setting in tuple_settings and \
                    isinstance(setting_value, six.string_types):
                warnings.warn(
                    "The %s setting must be a tuple. Please fix your "
                    "settings, as auto-correction is now deprecated." % setting,
                    PendingDeprecationWarning)
                setting_value = (setting_value, )  # In case the user forgot the comma.
            setattr(self, setting, setting_value)
    if not self.SECRET_KEY:
        raise ImproperlyConfigured(
            "The SECRET_KEY setting must not be empty.")
    if hasattr(time, 'tzset') and self.TIME_ZONE:
        # When we can, attempt to validate the timezone. If we can't find
        # this file, no check happens and it's harmless.
        zoneinfo_root = '/usr/share/zoneinfo'
        if (os.path.exists(zoneinfo_root) and not os.path.exists(
                os.path.join(zoneinfo_root, *(self.TIME_ZONE.split('/'))))):
            raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
        # Move the time zone info into os.environ. See ticket #2315 for why
        # we don't do this unconditionally (breaks Windows).
        os.environ['TZ'] = self.TIME_ZONE
        time.tzset()
    # Settings are configured, so we can set up the logger if required
    if self.LOGGING_CONFIG:
        # First find the logging configuration function ...
        logging_config_path, logging_config_func_name = self.LOGGING_CONFIG.rsplit(
            '.', 1)
        logging_config_module = importlib.import_module(
            logging_config_path)
        logging_config_func = getattr(logging_config_module,
                                      logging_config_func_name)
        # Backwards-compatibility shim for #16288 fix
        compat_patch_logging_config(self.LOGGING)
        # ... then invoke it with the logging settings
        logging_config_func(self.LOGGING)
'weixin_info.weixin_name', 'weixin_article.title', ) #微信列表 class WeixinArticleReproducedRecordAdmin(BaseAdmin): list_display = ( 'weixin_article_reproduced', 'weixin_name', 'openid', 'title', 'url', 'publish_date', 'create_date', ) xadmin.site.register(ImportWeixinInfo, ImportWeixinInfoAdmin) xadmin.site.register(FilterWeixinInfo, FilterWeixinInfoAdmin) xadmin.site.register(WeixinInfo, WeixinInfoAdmin) xadmin.site.register(WeixinArticle, WeixinArticleAdmin) xadmin.site.register(WeixinArticleReproduced, WeixinArticleReproducedAdmin) xadmin.site.register(WeixinArticleReproducedRecord, WeixinArticleReproducedRecordAdmin) #自定义插件导入 import_module('plugins.operatelist') import_module('plugins.export') # import_module('plugins.listextradisplay')
def import_attribute(path):
    """Import and return the attribute named by the dotted *path*."""
    assert isinstance(path, str)
    module_path, attr_name = path.rsplit('.', 1)
    module = importlib.import_module(module_path)
    return getattr(module, attr_name)
def test_get_tests(self):
    "Check that the get_tests helper function can find tests in a directory"
    app_module = import_module(TEST_APP_OK)
    found_tests = get_tests(app_module)
    # get_tests should hand back a module object, like its input.
    self.assertIsInstance(found_tests, type(app_module))
def get_session(session_key):
    """Return a loaded session store for *session_key*."""
    store = import_module(settings.SESSION_ENGINE).SessionStore(session_key)
    store.load()
    return store
def _send_request(self, message):
    """POST *message* to GCM and process the response.

    Handles: global Retry-After backoff, canonical-ID token updates,
    per-token error callbacks, and HTTP error codes (400/401/500/503).
    Returns False when the message is malformed, otherwise None.
    """
    if message.device_tokens is None or message.notification is None:
        logging.error(
            'Message must contain device_tokens and notification.')
        return False
    # Check for resend_after: a previous 503 told us to back off globally.
    retry_after = self._get_memcached(RETRY_AFTER)
    if retry_after is not None and retry_after > datetime.now():
        logging.warning('RETRY_AFTER: ' + repr(retry_after) +
                        ', requeueing message: ' + repr(message))
        self._requeue_message(message)
        return
    # Build request
    headers = {
        'Authorization': 'key=' + GCM_CONFIG['gcm_api_key'],
        'Content-Type': 'application/json'
    }
    gcm_post_json_str = ''
    try:
        gcm_post_json_str = message.json_string()
    except Exception:
        # Best-effort: a message we can't serialize is dropped (logged).
        logging.exception('Error generating json string for message: ' +
                          repr(message))
        return
    logging.info('Sending gcm_post_body: ' + repr(gcm_post_json_str))
    request = urllib2.Request(GOOGLE_GCM_SEND_URL, gcm_post_json_str,
                              headers)
    # Post
    try:
        resp = urllib2.urlopen(request)
        resp_json_str = resp.read()
        resp_json = json.loads(resp_json_str)
        logging.info('_send_request() resp_json: ' + repr(resp_json))
        failure = resp_json['failure']
        canonical_ids = resp_json['canonical_ids']
        results = resp_json['results']
        # If the value of failure and canonical_ids is 0, it's not
        # necessary to parse the remainder of the response.
        if failure == 0 and canonical_ids == 0:
            # Success, nothing to do
            if 'on_success_callback_func' in GCM_CONFIG:
                success_callback_func_path = GCM_CONFIG[
                    'on_success_callback_func']
                mod_path, func_name = success_callback_func_path.rsplit(
                    '.', 1)
                mod = importlib.import_module(mod_path)
                logging.info('success_callback_func_path: ' +
                             repr((mod_path, func_name, mod)))
                success_callback_func = getattr(mod, func_name)
                success_callback_func(message, resp_json)
            return
        else:
            # Process result messages for each token (result index matches
            # original token index from message).
            for result_index, result in enumerate(results):
                if 'message_id' in result and 'registration_id' in result:
                    # GCM returned a canonical registration id: update the
                    # stored device token.
                    try:
                        old_device_token = message.device_tokens[
                            result_index]
                        new_device_token = result['registration_id']
                        self.update_token(old_device_token,
                                          new_device_token)
                    except Exception:
                        logging.exception('Error updating device token')
                elif 'error' in result:
                    # Handle GCM error
                    error_msg = result.get('error')
                    try:
                        device_token = message.device_tokens[result_index]
                        self._on_error(device_token, error_msg, message)
                        if 'on_error_callback_func' in GCM_CONFIG:
                            error_callback_func_path = GCM_CONFIG[
                                'on_error_callback_func']
                            mod_path, func_name = error_callback_func_path.rsplit(
                                '.', 1)
                            mod = importlib.import_module(mod_path)
                            logging.info('error_callback_func_path: ' +
                                         repr((mod_path, func_name, mod)))
                            error_callback_func = getattr(mod, func_name)
                            error_callback_func(message, resp_json)
                    except Exception:
                        logging.exception('Error handling GCM error: ' +
                                          repr(error_msg))
    except urllib2.HTTPError as e:
        self._incr_memcached(TOTAL_ERRORS, 1)
        if e.code == 400:
            logging.error('400, Invalid GCM JSON message: ' +
                          repr(gcm_post_json_str))
        elif e.code == 401:
            logging.error(
                '401, Error authenticating with GCM. Retrying message. Might need to fix auth key!'
            )
            self._requeue_message(message)
        elif e.code == 500:
            logging.error(
                '500, Internal error in the GCM server while trying to send message: '
                + repr(gcm_post_json_str))
        elif e.code == 503:
            # BUG FIX: the original read `resp.headers`, but `resp` is
            # unbound when urlopen() raises -- the HTTPError object itself
            # carries the response headers. Also guard against a missing
            # Retry-After header (int(None) would raise TypeError).
            retry_after_header = e.headers.get('Retry-After')
            try:
                retry_seconds = int(retry_after_header)
            except (TypeError, ValueError):
                retry_seconds = 10
            logging.error(
                '503, Throttled. Retry after delay. Requeuing message. Delay in seconds: '
                + str(retry_seconds))
            retry_timestamp = datetime.now() + timedelta(
                seconds=retry_seconds)
            self._set_memcached(RETRY_AFTER, retry_timestamp)
            self._requeue_message(message)
        else:
            logging.exception('Unexpected HTTPError: ' + str(e.code) + " " +
                              e.msg + " " + e.read())
def test_import_error(self):
    "Test for #12658 - Tests with ImportError's shouldn't fail silently"
    broken_module = import_module(TEST_APP_ERROR)
    # get_tests must propagate the ImportError, not swallow it.
    self.assertRaises(ImportError, get_tests, broken_module)
def compress(self, log=None, **options):
    """
    Searches templates containing 'compress' nodes and compresses them
    "offline" -- outside of the request/response cycle.

    The result is cached with a cache-key derived from the content of the
    compress nodes (not the content of the possibly linked files!).
    """
    extensions = options.get('extensions')
    extensions = self.handle_extensions(extensions or ['html'])
    verbosity = int(options.get("verbosity", 0))
    if not log:
        log = StringIO()
    if not settings.TEMPLATE_LOADERS:
        raise OfflineGenerationError("No template loaders defined. You "
                                     "must set TEMPLATE_LOADERS in your "
                                     "settings.")
    # Collect every directory the configured template loaders can serve
    # templates from.
    paths = set()
    for loader in self.get_loaders():
        try:
            module = import_module(loader.__module__)
            get_template_sources = getattr(module, 'get_template_sources',
                                           None)
            if get_template_sources is None:
                get_template_sources = loader.get_template_sources
            paths.update(list(get_template_sources('')))
        except (ImportError, AttributeError):
            # Yeah, this didn't work out so well, let's move on
            pass
    if not paths:
        raise OfflineGenerationError("No template paths found. None of "
                                     "the configured template loaders "
                                     "provided template paths. See "
                                     "http://django.me/template-loaders "
                                     "for more information on template "
                                     "loaders.")
    if verbosity > 1:
        log.write("Considering paths:\n\t" + "\n\t".join(paths) + "\n")
    # Walk each path and keep files matching the requested extensions.
    templates = set()
    for path in paths:
        for root, dirs, files in walk(path,
                                      followlinks=options.get(
                                          'followlinks', False)):
            templates.update(os.path.join(root, name)
                             for name in files
                             if not name.startswith('.') and
                             any(fnmatch(name, "*%s" % glob)
                                 for glob in extensions))
    if not templates:
        raise OfflineGenerationError("No templates found. Make sure your "
                                     "TEMPLATE_LOADERS and TEMPLATE_DIRS "
                                     "settings are correct.")
    if verbosity > 1:
        log.write("Found templates:\n\t" + "\n\t".join(templates) + "\n")
    # Parse every template, skipping unreadable or broken ones, and record
    # the {% compress %} nodes each one contains.
    compressor_nodes = SortedDict()
    for template_name in templates:
        try:
            template_file = open(template_name)
            try:
                template = Template(template_file.read().decode(
                    settings.FILE_CHARSET))
            finally:
                template_file.close()
        except IOError:  # unreadable file -> ignore
            if verbosity > 0:
                log.write("Unreadable template at: %s\n" % template_name)
            continue
        except TemplateSyntaxError:  # broken template -> ignore
            if verbosity > 0:
                log.write("Invalid template at: %s\n" % template_name)
            continue
        except UnicodeDecodeError:
            # NOTE(review): unlike the branches above there is no
            # `continue` here, so `template` may hold the previous
            # iteration's value (or be unbound) -- confirm intended.
            if verbosity > 0:
                log.write("UnicodeDecodeError while trying to read "
                          "template %s\n" % template_name)
        nodes = list(self.walk_nodes(template))
        if nodes:
            compressor_nodes.setdefault(template_name, []).extend(nodes)
    if not compressor_nodes:
        raise OfflineGenerationError(
            "No 'compress' template tags found in templates.")
    if verbosity > 0:
        log.write("Found 'compress' tags in:\n\t" +
                  "\n\t".join(compressor_nodes.keys()) + "\n")
    log.write("Compressing... ")
    # Render every compress node with the offline context and cache the
    # result under its offline cache key.
    count = 0
    results = []
    context = Context(settings.COMPRESS_OFFLINE_CONTEXT)
    for nodes in compressor_nodes.values():
        for node in nodes:
            key = get_offline_cachekey(node.nodelist)
            try:
                result = node.render(context, forced=True)
            except Exception, e:
                raise CommandError("An error occured during rending: "
                                   "%s" % e)
            cache.set(key, result, settings.COMPRESS_OFFLINE_TIMEOUT)
            results.append(result)
            count += 1
def update_dashboards(modules, horizon_config, installed_apps):
    """Imports dashboard and panel configuration from modules and applies it.

    The submodules from specified modules are imported, and the configuration
    for the specific dashboards is merged, with the later modules overriding
    settings from the former. Then the configuration is applied to
    horizon_config and installed_apps, in alphabetical order of files from
    which the configurations were imported.

    For example, given this setup:

        | foo/__init__.py
        | foo/_10_baz.py
        | foo/_20_qux.py
        | bar/__init__.py
        | bar/_30_baz_.py

    and being called with ``modules=[foo, bar]``, we will first have the
    configuration from ``_10_baz`` and ``_30_baz`` merged, then the
    configurations will be applied in order ``qux``, ``baz`` (``baz`` is
    second, because the most recent file which contributed to it,
    ``_30_baz``, comes after ``_20_qux``).

    Panel specific configurations are stored in horizon_config. Dashboards
    from both plugin-based and openstack_dashboard must be registered before
    the panel configuration can be applied. Making changes to the panel is
    deferred until the horizon autodiscover is completed, configurations are
    applied in alphabetical order of files where it was imported.
    """
    config_dashboards = horizon_config.get('dashboards', [])
    if config_dashboards or horizon_config.get('default_dashboard'):
        logging.warning(
            '"dashboards" and "default_dashboard" in (local_)settings is '
            'DEPRECATED now and may be unsupported in some future release. '
            'The preferred way to specify the order of dashboards and the '
            'default dashboard is the pluggable dashboard mechanism (in %s).',
            ', '.join(
                [os.path.abspath(module.__path__[0]) for module in modules]))
    enabled_dashboards = []
    disabled_dashboards = []
    exceptions = horizon_config.get('exceptions', {})
    apps = []
    angular_modules = []
    js_files = []
    js_spec_files = []
    scss_files = []
    panel_customization = []
    update_horizon_config = {}
    # Merge every imported config file into the accumulators above.
    for key, config in import_dashboard_config(modules):
        if config.get('DISABLED', False):
            if config.get('DASHBOARD'):
                disabled_dashboards.append(config.get('DASHBOARD'))
            continue
        _apps = config.get('ADD_INSTALLED_APPS', [])
        apps.extend(_apps)
        if config.get('AUTO_DISCOVER_STATIC_FILES', False):
            for _app in _apps:
                module = importlib.import_module(_app)
                base_path = os.path.join(module.__path__[0], 'static/')
                fd.populate_horizon_config(horizon_config, base_path)
        add_exceptions = six.iteritems(config.get('ADD_EXCEPTIONS', {}))
        for category, exc_list in add_exceptions:
            exceptions[category] = tuple(
                set(exceptions.get(category, ()) + exc_list))
        angular_modules.extend(config.get('ADD_ANGULAR_MODULES', []))
        # avoid pulling in dashboard javascript dependencies multiple times
        existing = set(js_files)
        js_files.extend(
            [f for f in config.get('ADD_JS_FILES', []) if f not in existing])
        js_spec_files.extend(config.get('ADD_JS_SPEC_FILES', []))
        scss_files.extend(config.get('ADD_SCSS_FILES', []))
        update_horizon_config.update(config.get('UPDATE_HORIZON_CONFIG', {}))
        if config.get('DASHBOARD'):
            dashboard = key
            enabled_dashboards.append(dashboard)
            if config.get('DEFAULT', False):
                horizon_config['default_dashboard'] = dashboard
        elif config.get('PANEL') or config.get('PANEL_GROUP'):
            config.pop("__builtins__", None)
            panel_customization.append(config)
    # Preserve the dashboard order specified in settings
    dashboards = (
        [d for d in config_dashboards if d not in disabled_dashboards] +
        [d for d in enabled_dashboards if d not in config_dashboards])
    horizon_config['panel_customization'] = panel_customization
    horizon_config['dashboards'] = tuple(dashboards)
    horizon_config.setdefault('exceptions', {}).update(exceptions)
    horizon_config.update(update_horizon_config)
    horizon_config.setdefault('angular_modules', []).extend(angular_modules)
    horizon_config.setdefault('js_files', []).extend(js_files)
    horizon_config.setdefault('js_spec_files', []).extend(js_spec_files)
    horizon_config.setdefault('scss_files', []).extend(scss_files)
    # apps contains reference to applications declared in the enabled folder
    # basically a list of applications that are internal and external plugins
    # installed_apps contains reference to applications declared in settings
    # such as django.contribe.*, django_pyscss, compressor, horizon, etc...
    # for translation, we are only interested in the list of external plugins
    # so we save the reference to it before we append to installed_apps
    horizon_config.setdefault('plugins', []).extend(apps)
    installed_apps[0:0] = apps
def translation(language):
    """
    Returns a translation object.

    This translation object will be constructed out of multiple
    GNUTranslations objects by merging their catalogs. It will construct a
    object for the requested language and add a fallback to the default
    language, if it's different from the requested language.
    """
    global _translations
    t = _translations.get(language, None)
    if t is not None:
        return t
    from django.conf import settings
    # Locale directory shipped next to the settings' package (Django core).
    globalpath = os.path.join(
        os.path.dirname(sys.modules[settings.__module__].__file__), 'locale')
    if settings.SETTINGS_MODULE is not None:
        parts = settings.SETTINGS_MODULE.split('.')
        project = import_module(parts[0])
        projectpath = os.path.join(os.path.dirname(project.__file__),
                                   'locale')
    else:
        projectpath = None

    def _fetch(lang, fallback=None):
        # Build (and memoize) the merged catalog for one language; later
        # merges (apps, project, LOCALE_PATHS) override earlier entries.
        global _translations
        res = _translations.get(lang, None)
        if res is not None:
            return res
        loc = to_locale(lang)

        def _translation(path):
            try:
                t = gettext_module.translation('django', path, [loc],
                                               DjangoTranslation)
                t.set_language(lang)
                return t
            except IOError:
                return None

        res = _translation(globalpath)
        # We want to ensure that, for example, "en-gb" and "en-us" don't share
        # the same translation object (thus, merging en-us with a local update
        # doesn't affect en-gb), even though they will both use the core "en"
        # translation. So we have to subvert Python's internal gettext caching.
        # NOTE(review): if the base catalog was not found, res is None here
        # and the copies below would raise AttributeError -- confirm.
        base_lang = lambda x: x.split('-', 1)[0]
        if base_lang(lang) in [base_lang(trans) for trans in _translations]:
            res._info = res._info.copy()
            res._catalog = res._catalog.copy()

        def _merge(path):
            t = _translation(path)
            if t is not None:
                if res is None:
                    return t
                else:
                    res.merge(t)
            return res

        # Apps are merged in reverse order so earlier INSTALLED_APPS win.
        for appname in reversed(settings.INSTALLED_APPS):
            app = import_module(appname)
            apppath = os.path.join(os.path.dirname(app.__file__), 'locale')
            if os.path.isdir(apppath):
                res = _merge(apppath)
        localepaths = [
            os.path.normpath(path) for path in settings.LOCALE_PATHS
        ]
        if (projectpath and os.path.isdir(projectpath)
                and os.path.normpath(projectpath) not in localepaths):
            res = _merge(projectpath)
        for localepath in reversed(settings.LOCALE_PATHS):
            if os.path.isdir(localepath):
                res = _merge(localepath)
        if res is None:
            if fallback is not None:
                res = fallback
            else:
                return gettext_module.NullTranslations()
        _translations[lang] = res
        return res

    default_translation = _fetch(settings.LANGUAGE_CODE)
    current_translation = _fetch(language, fallback=default_translation)
    return current_translation
# define Oracle comment format, and ignore them oracleSqlComment = "--" + restOfLine selectStmt.ignore(oracleSqlComment) return selectStmt SQL = setupSQLparser() ############ SQL PARSER FINISHED; SOME HELPER THINGS BELOW from django.db.models import Q, F from django.conf import settings from django.utils.importlib import import_module DICTS = import_module(settings.NODEPKG + '.dictionaries') from caselessdict import CaselessDict RESTRICTABLES = CaselessDict(DICTS.RESTRICTABLES) from types import TupleType, FunctionType from django.db.models.query_utils import Q as QType from string import strip import logging log = logging.getLogger('vamdc.tap.sql') # Q-objects for always True / False QTrue = Q(pk=F('pk')) QFalse = ~QTrue OPTRANS= { # transfer SQL operators to django-style '<': '__lt',
collect_urls(urls, node, ns=newns, prefixes=patterns) elif isinstance(item, RegexURLPattern): if item.name: for exc_pattern in getattr(settings, "JSROUTES_EXCLUDE_NAMES", []): if isinstance(exc_pattern, str): exc_pattern = re.compile(exc_pattern) if exc_pattern.match(item.name): break else: pattern = merge_patterns(prefixes, item.regex.pattern) for exc_pattern in getattr(settings, "JSROUTES_EXCLUDE_PATTERNS", []): if isinstance(exc_pattern, str): exc_pattern = re.compile(exc_pattern) if exc_pattern.match(pattern): break else: name = "%s:%s" % (ns, item.name) if ns else item.name urls.append([name, pattern]) else: raise RuntimeError("can't process %r" % item) urls = [] collect_urls(urls, import_module(settings.ROOT_URLCONF).urlpatterns) urls.reverse() urls = json.dumps(urls) tmpl = get_template("jsroutes.js") javascript = tmpl.render(Context({"urls": urls}))
def setUp(self):
    """Create and persist a session, then plant its key in the test
    client's cookies so requests are made within that session."""
    session_store = import_module(settings.SESSION_ENGINE).SessionStore()
    session_store.save()
    self.session = session_store
    self.client.cookies[settings.SESSION_COOKIE_NAME] = session_store.session_key
def get_traceback_html(self):
    "Return HTML code for traceback."
    if issubclass(self.exc_type, TemplateDoesNotExist):
        from django.template.loader import template_source_loaders
        self.template_does_not_exist = True
        # For each loader, record which candidate template paths it tried
        # and whether each exists, for display in the debug page.
        self.loader_debug_info = []
        for loader in template_source_loaders:
            try:
                module = import_module(loader.__module__)
                source_list_func = module.get_template_sources
                # NOTE: This assumes exc_value is the name of the template that
                # the loader attempted to load.
                template_list = [{'name': t, 'exists': os.path.exists(t)} \
                    for t in source_list_func(str(self.exc_value))]
            except (ImportError, AttributeError):
                template_list = []
            if hasattr(loader, '__class__'):
                loader_name = loader.__module__ + '.' + loader.__class__.__name__
            else:
                loader_name = loader.__module__ + '.' + loader.__name__
            self.loader_debug_info.append({
                'loader': loader_name,
                'templates': template_list,
            })
    if (settings.TEMPLATE_DEBUG and hasattr(self.exc_value, 'source')
            and isinstance(self.exc_value, TemplateSyntaxError)):
        self.get_template_exception_info()
    frames = self.get_traceback_frames()
    # For Unicode errors, show a snippet of the offending string around
    # the reported start/end positions.
    unicode_hint = ''
    if issubclass(self.exc_type, UnicodeError):
        start = getattr(self.exc_value, 'start', None)
        end = getattr(self.exc_value, 'end', None)
        if start is not None and end is not None:
            unicode_str = self.exc_value.args[1]
            unicode_hint = smart_unicode(
                unicode_str[max(start - 5, 0):min(end + 5, len(unicode_str))],
                'ascii',
                errors='replace')
    from django import get_version
    t = Template(TECHNICAL_500_TEMPLATE, name='Technical 500 template')
    c = Context({
        'exception_type': self.exc_type.__name__,
        'exception_value': smart_unicode(self.exc_value, errors='replace'),
        'unicode_hint': unicode_hint,
        'frames': frames,
        'lastframe': frames[-1],
        'request': self.request,
        'settings': get_safe_settings(),
        'sys_executable': sys.executable,
        'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
        'server_time': datetime.datetime.now(),
        'django_version_info': get_version(),
        'sys_path': sys.path,
        'template_info': self.template_info,
        'template_does_not_exist': self.template_does_not_exist,
        'loader_debug_info': self.loader_debug_info,
    })
    return t.render(c)
def _fetch(lang, fallback=None):
    # Build (and memoize in module-global _translations) the merged gettext
    # catalog for *lang*: core catalog first, then each installed app,
    # then the project's locale dir, then LOCALE_PATHS -- later merges
    # override earlier entries. Relies on module-level globalpath /
    # projectpath / settings being set up by the caller.
    global _translations
    res = _translations.get(lang, None)
    if res is not None:
        return res
    loc = to_locale(lang)

    def _translation(path):
        # Load the 'django' domain catalog from *path*, or None if absent.
        try:
            t = gettext_module.translation('django', path, [loc],
                                           DjangoTranslation)
            t.set_language(lang)
            return t
        except IOError:
            return None

    res = _translation(globalpath)
    # We want to ensure that, for example, "en-gb" and "en-us" don't share
    # the same translation object (thus, merging en-us with a local update
    # doesn't affect en-gb), even though they will both use the core "en"
    # translation. So we have to subvert Python's internal gettext caching.
    # NOTE(review): if the base catalog was not found, res is None here and
    # the copies below would raise AttributeError -- confirm.
    base_lang = lambda x: x.split('-', 1)[0]
    if base_lang(lang) in [base_lang(trans) for trans in _translations]:
        res._info = res._info.copy()
        res._catalog = res._catalog.copy()

    def _merge(path):
        # Merge the catalog at *path* into res (or adopt it if res is None).
        t = _translation(path)
        if t is not None:
            if res is None:
                return t
            else:
                res.merge(t)
        return res

    # Reverse order so earlier INSTALLED_APPS take precedence.
    for appname in reversed(settings.INSTALLED_APPS):
        app = import_module(appname)
        apppath = os.path.join(os.path.dirname(app.__file__), 'locale')
        if os.path.isdir(apppath):
            res = _merge(apppath)
    localepaths = [
        os.path.normpath(path) for path in settings.LOCALE_PATHS
    ]
    if (projectpath and os.path.isdir(projectpath)
            and os.path.normpath(projectpath) not in localepaths):
        res = _merge(projectpath)
    for localepath in reversed(settings.LOCALE_PATHS):
        if os.path.isdir(localepath):
            res = _merge(localepath)
    if res is None:
        if fallback is not None:
            res = fallback
        else:
            return gettext_module.NullTranslations()
    _translations[lang] = res
    return res
def create_session():
    """Create, persist, and return a brand-new session store."""
    store = import_module(settings.SESSION_ENGINE).SessionStore()
    store.save()
    return store
def __init__(self):
    """Instantiate the allocation strategy named by the dotted path in
    settings.BACKEND_ALLOCATOR_MODULE."""
    module_path, class_name = settings.BACKEND_ALLOCATOR_MODULE.rsplit('.', 1)
    strategy_cls = getattr(importlib.import_module(module_path), class_name)
    self.strategy_mod = strategy_cls()