def handle(self, *args, **options):
    if options['all']:
        models = apps.get_models()
        feedback = "All instances from all models saved."
    elif options['app']:
        apps_list = options['app'].split()
        try:
            models_list = []
            for name in apps_list:
                models_list.append(apps.get_models(apps.get_app(name)))
        except ImproperlyConfigured:
            return self.stdout.write("Can't find '%s' app." % ', '.join(apps_list))
        else:
            models = [item for sublist in models_list for item in sublist]
            feedback = 'All instances from all models in "%s" saved.' % ', '.join(apps_list)
    else:
        try:
            models = []
            for model in args:
                models.append(apps.get_model(model))
        except LookupError:
            return self.stdout.write("Can't find '%s' model." % args)
        else:
            feedback = 'All instances saved.'
    self.save_objects(models)
    return self.stdout.write(feedback)
def eliminar_registros(request):
    print " >>> %s <<< " % request.POST
    request.META['CAMPO_XXXXXXXXXXXXXXXXXX'] = "-.-.-.- -. - . - . - . - . - . -"
    lista_destinatarios = request.POST.getlist('boton_check')
    print lista_destinatarios
    modelo_vista = request.POST['modelo']
    apps.get_models()
    myapp = apps.get_app_config('han_app')
    # myapp.models
    index_modelo = myapp.models.keys().index(modelo_vista)
    print " >>> %s <<< " % myapp
    print " >>> %s <<< " % myapp.models
    print " >>> %s <<< " % myapp.models[modelo_vista]
    print " >>> %s <<< " % type(myapp.models.items()[index_modelo][1])
    modelo = myapp.models.items()[index_modelo][1]
    # m = myapp.models.items()[2][1].objects.all()
    for i in lista_destinatarios:
        modelo.objects.filter(id=int(i)).delete()
    # m = __import__('han.han_app.views')
    # print " >>> %s <<< " % m
    # func = getattr(m, modelo_vista)
    return globals()[modelo_vista](request)
def handle(self, *app_labels, **options):
    # Activate project's default language
    translation.activate(settings.LANGUAGE_CODE)
    comment = options["comment"]
    batch_size = options["batch_size"]
    database = options.get('database')
    verbosity = int(options.get("verbosity", 1))
    app_list = OrderedDict()
    # if no apps given, use all installed.
    if len(app_labels) == 0:
        all_apps = [config.models_module for config in apps.get_app_configs()
                    if config.models_module is not None]
        for app in all_apps:
            if not app in app_list:
                app_list[app] = []
            for model_class in apps.get_models(app):
                if not model_class in app_list[app]:
                    app_list[app].append(model_class)
    else:
        for label in app_labels:
            try:
                app_label, model_label = label.split(".")
                try:
                    app = get_app(app_label)
                except ImproperlyConfigured:
                    raise CommandError("Unknown application: %s" % app_label)
                model_class = apps.get_model(app_label, model_label)
                if model_class is None:
                    raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
                if app in app_list:
                    if app_list[app] and model_class not in app_list[app]:
                        app_list[app].append(model_class)
                else:
                    app_list[app] = [model_class]
            except ValueError:
                # This is just an app - no model qualifier.
                app_label = label
                try:
                    app = get_app(app_label)
                    if not app in app_list:
                        app_list[app] = []
                    for model_class in apps.get_models(app):
                        if not model_class in app_list[app]:
                            app_list[app].append(model_class)
                except ImproperlyConfigured:
                    raise CommandError("Unknown application: %s" % app_label)
    # Create revisions.
    for app, model_classes in app_list.items():
        for model_class in model_classes:
            self.create_initial_revisions(app, model_class, comment, batch_size, verbosity, database=database)
    # Go back to default language
    translation.deactivate()
def handle(self, *args, **options):
    if not args:
        apps = []
        for model in a.get_models():
            apps.append(a.get_app_config(model._meta.app_label))
    else:
        apps = []
        for arg in args:
            apps.append(a.get_app_config(arg))
    for app in apps:
        create_permissions(app, a.get_models(), options.get('verbosity', 0))
def test_models_not_loaded(self):
    """
    apps.get_models() raises an exception if apps.models_ready isn't True.
    """
    apps.models_ready = False
    try:
        # The cache must be cleared to trigger the exception.
        apps.get_models.cache_clear()
        with self.assertRaisesMessage(AppRegistryNotReady, "Models aren't loaded yet."):
            apps.get_models()
    finally:
        apps.models_ready = True
def handle_noargs(self, **options):
    # XXX: (Temporary) workaround for ticket #1796: force early loading of all
    # models from installed apps.
    from django.apps import apps
    apps.get_models()

    use_plain = options.get('plain', False)
    no_startup = options.get('no_startup', False)
    interface = options.get('interface', None)

    try:
        if use_plain:
            # Don't bother loading IPython, because the user wants plain Python.
            raise ImportError
        self.run_shell(shell=interface)
    except ImportError:
        import code
        # Set up a dictionary to serve as the environment for the shell, so
        # that tab completion works on objects that are imported at runtime.
        # See ticket 5082.
        imported_objects = {}
        try:
            # Try activating rlcompleter, because it's handy.
            import readline
        except ImportError:
            pass
        else:
            # We don't have to wrap the following import in a 'try', because
            # we already know 'readline' was imported successfully.
            import rlcompleter
            readline.set_completer(rlcompleter.Completer(imported_objects).complete)
            readline.parse_and_bind("tab:complete")

        # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
        # conventions and get $PYTHONSTARTUP first then .pythonrc.py.
        if not no_startup:
            for pythonrc in (os.environ.get("PYTHONSTARTUP"), '~/.pythonrc.py'):
                if not pythonrc:
                    continue
                pythonrc = os.path.expanduser(pythonrc)
                if not os.path.isfile(pythonrc):
                    continue
                try:
                    with open(pythonrc) as handle:
                        exec(compile(handle.read(), pythonrc, 'exec'), imported_objects)
                except NameError:
                    pass
        code.interact(local=imported_objects)
def handle(self, *args, **options):
    if settings.MEDIA_ROOT == "":
        print("MEDIA_ROOT is not set, nothing to do")
        return

    # Get a list of all files under MEDIA_ROOT
    media = []
    for root, dirs, files in os.walk(settings.MEDIA_ROOT):
        for f in files:
            media.append(os.path.abspath(os.path.join(root, f)))

    # Get list of all fields (value) for each model (key)
    # that is a FileField or subclass of a FileField
    model_dict = defaultdict(list)
    for model in apps.get_models():
        for field in model._meta.fields:
            if issubclass(field.__class__, models.FileField):
                model_dict[model].append(field)

    # Get a list of all files referenced in the database
    referenced = []
    for model in model_dict:
        all = model.objects.all().iterator()
        for object in all:
            for field in model_dict[model]:
                target_file = getattr(object, field.name)
                if target_file:
                    referenced.append(os.path.abspath(target_file.path))

    # Print each file in MEDIA_ROOT that is not referenced in the database
    for m in media:
        if m not in referenced:
            print(m)
def _build_kml_sources(self, sources):
    """
    Goes through the given sources and returns a 3-tuple of the
    application label, module name, and field name of every
    GeometryField encountered in the sources.

    If no sources are provided, then all models.
    """
    kml_sources = []
    if sources is None:
        sources = apps.get_models()
    for source in sources:
        if isinstance(source, models.base.ModelBase):
            for field in source._meta.fields:
                if isinstance(field, GeometryField):
                    kml_sources.append((source._meta.app_label,
                                        source._meta.model_name,
                                        field.name))
        elif isinstance(source, (list, tuple)):
            if len(source) != 3:
                raise ValueError('Must specify a 3-tuple of (app_label, module_name, field_name).')
            kml_sources.append(source)
        else:
            raise TypeError('KML Sources must be a model or a 3-tuple.')
    return kml_sources
def _get_valid_export_models(kinds=None):
    """Make sure no blacklist models are included in our backup export."""
    excluded_models = get_backup_setting("EXCLUDE_MODELS", required=False, default=[])
    excluded_apps = get_backup_setting("EXCLUDE_APPS", required=False, default=[])

    models_to_backup = []
    for model in apps.get_models(include_auto_created=True):
        app_label = model._meta.app_label
        object_name = model._meta.object_name
        model_def = "{}_{}".format(app_label, object_name.lower())

        if app_label in excluded_apps:
            logger.info(
                "Not backing up %s due to the %s app being in DJANGAE_BACKUP_EXCLUDE_APPS",
                model_def, app_label
            )
            continue

        if model_def in excluded_models:
            logger.info(
                "Not backing up %s as it is blacklisted in DJANGAE_BACKUP_EXCLUDE_MODELS",
                model_def
            )
            continue

        logger.info("%s added to list of models to backup", model_def)
        models_to_backup.append(model_def)

    # if kinds were explicitly provided by the caller, we only return those
    # already validated by our previous checks
    if kinds:
        models_to_backup = [model for model in models_to_backup if model in kinds]

    return models_to_backup
def _check_boolean_field_default_value(app_configs=None, **kwargs):
    """
    Checks if there are any BooleanFields without a default value, &
    warns the user that the default has changed from False to None.
    """
    from django.db import models

    problem_fields = [
        field
        for model in apps.get_models(**kwargs)
        if app_configs is None or model._meta.app_config in app_configs
        for field in model._meta.local_fields
        if isinstance(field, models.BooleanField) and not field.has_default()
    ]

    return [
        Warning(
            "BooleanField does not have a default value.",
            hint=("Django 1.6 changed the default value of BooleanField from False to None. "
                  "See https://docs.djangoproject.com/en/1.6/ref/models/fields/#booleanfield "
                  "for more information."),
            obj=field,
            id='1_6.W002',
        )
        for field in problem_fields
    ]
def get_app_models(app_labels=None):
    if app_labels is None:
        try:
            # django >= 1.7, to support AppConfig
            from django.apps import apps
            return apps.get_models(include_auto_created=True)
        except ImportError:
            from django.db import models
            return models.get_models(include_auto_created=True)

    if not isinstance(app_labels, (list, tuple, set)):
        app_labels = [app_labels]

    app_models = []
    try:
        # django >= 1.7, to support AppConfig
        from django.apps import apps
        for app_label in app_labels:
            app_config = apps.get_app_config(app_label)
            app_models.extend(app_config.get_models(include_auto_created=True))
    except ImportError:
        from django.db import models
        try:
            app_list = [models.get_app(app_label) for app_label in app_labels]
        except (models.ImproperlyConfigured, ImportError) as e:
            raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
        for app in app_list:
            app_models.extend(models.get_models(app, include_auto_created=True))
    return app_models
def get_models_from_cache(app):
    try:
        from django.apps import apps
        return apps.get_models(app)
    except ImportError:
        from django.db.models.loading import cache
        return cache.get_models(app)
def create_datastore_backup(request):
    """Creates a datastore backup based on the DJANGAE_BACKUP_X settings."""
    enabled = get_backup_setting("ENABLED")
    if not enabled:
        msg = "DJANGAE_BACKUP_ENABLED is False. Not backing up"
        logger.info(msg)
        return HttpResponse(msg)

    gcs_bucket = get_backup_path()
    backup_name = get_backup_setting("NAME", required=False, default='djangae-backups')
    queue = get_backup_setting("QUEUE", required=False)
    exclude_models = get_backup_setting("EXCLUDE_MODELS", required=False, default=[])
    exclude_apps = get_backup_setting("EXCLUDE_APPS", required=False, default=[])

    models = []
    for model in apps.get_models(include_auto_created=True):
        app_label = model._meta.app_label
        object_name = model._meta.object_name
        model_def = "{}.{}".format(app_label, object_name)

        if app_label in exclude_apps:
            logger.info(
                "Not backing up {} due to {} being in DJANGAE_BACKUP_EXCLUDE_APPS".format(
                    model_def, app_label))
            continue

        if model_def in exclude_models:
            logger.info(
                "Not backing up {} as it is present in DJANGAE_BACKUP_EXCLUDE_MODELS".format(
                    model_def))
            continue

        logger.info("Backing up {}".format(model_def))
        models.append(model)

    if not models:
        raise Exception("No models to back up")

    # Build the target path and query for the task.
    params = [
        ('name', backup_name),
        ('gs_bucket_name', gcs_bucket),
        ('filesystem', 'gs'),
    ]
    params.extend(('kind', m._meta.db_table) for m in models)
    if queue:
        params.append(('queue', queue))

    query = urllib.parse.urlencode(params, doseq=True)
    backup_url = '{}?{}'.format(BACKUP_HANDLER, query)

    # Backups must be started via task queue or cron.
    taskqueue.add(
        method="GET",
        url=backup_url,
        target=GAE_BUILTIN_MODULE
    )

    return HttpResponse("Started backup using URL {}".format(backup_url))
def get_models_info(show_type=True):
    import json
    from django.apps import apps

    result = {
        'nodes': {},
        'edges': set(),
    }
    for model in apps.get_models():
        result['nodes'][model.__name__] = []
        for field in model._meta.fields:
            result['nodes'][model.__name__].append(get_field_name(field, show_type))
            if hasattr(field, 'related'):
                if hasattr(field, 'related_model'):
                    result['edges'].add('%s -> %s' % (field.related_model.__name__, model.__name__))
                elif hasattr(field.related, 'parent_model'):
                    result['edges'].add('%s -> %s' % (field.related.parent_model.__name__, model.__name__))
        for field in model._meta.many_to_many:
            result['nodes'][model.__name__].append(get_field_name(field, show_type))
            result['nodes'][field.rel.through.__name__] = []
            result['edges'].add('%s -> %s' % (model.__name__, field.rel.through.__name__))
            result['edges'].add('%s -> %s' % (field.rel.to.__name__, field.rel.through.__name__))
            for sub_field in field.rel.through._meta.fields:
                result['nodes'][field.rel.through.__name__].append(get_field_name(sub_field, show_type))
    result['edges'] = list(result['edges'])
    return json.dumps(result, indent=4)
def merge_objects(self, alias_objects=[]):
    from django.contrib.contenttypes.fields import GenericForeignKey
    from django.apps import apps

    if not isinstance(alias_objects, list):
        alias_objects = [alias_objects]

    primary_class = self.__class__
    for alias_object in alias_objects:
        if not isinstance(alias_object, primary_class):
            raise TypeError('Only models of same class can be merged')

    generic_fields = []
    for model in apps.get_models():
        generic_fields.extend(
            filter(lambda x: isinstance(x, GenericForeignKey), model.__dict__.values()))

    blank_local_fields = set([
        field.attname for field in self._meta.local_fields
        if getattr(self, field.attname) in [None, '']])

    for alias_object in alias_objects:
        for related_object in alias_object._meta.get_fields(include_hidden=True):
            if related_object.one_to_many and related_object.auto_created:
                alias_varname = related_object.get_accessor_name()
                obj_varname = related_object.field.name
                related_objects = getattr(alias_object, alias_varname)
                for obj in related_objects.all():
                    setattr(obj, obj_varname, self)
                    obj.save()
            if related_object.many_to_many and related_object.auto_created:
                alias_varname = related_object.get_accessor_name()
                obj_varname = related_object.field.name
                if alias_varname is not None:
                    related_many_objects = getattr(alias_object, alias_varname).all()
                else:
                    related_many_objects = getattr(alias_object, obj_varname).all()
                for obj in related_many_objects.all():
                    getattr(obj, obj_varname).remove(alias_object)
                    getattr(obj, obj_varname).add(self)

        for field in generic_fields:
            filter_kwargs = {}
            filter_kwargs[field.fk_field] = alias_object._get_pk_val()
            filter_kwargs[field.ct_field] = field.get_content_type(alias_object)
            for generic_related_object in field.model.objects.filter(**filter_kwargs):
                setattr(generic_related_object, field.name, self)
                generic_related_object.save()

        filled_up = set()
        for field_name in blank_local_fields:
            val = getattr(alias_object, field_name)
            if val not in [None, '']:
                setattr(self, field_name, val)
                filled_up.add(field_name)
        blank_local_fields -= filled_up

        alias_object.delete()
    self.save()
def get_resource_types():
    resource_types = []
    for model in apps.get_models():
        if issubclass(model, AbstractResource) and model != BaseResource:
            if not getattr(model, 'archived_model', False):
                resource_types.append(model)
    return resource_types
def test_models_define_python_3_compatible_representation(self):
    """
    In Python 2, models can define __unicode__ to get a text representation,
    in Python 3 this is achieved by defining __str__. The
    python_2_unicode_compatible decorator helps with that. We must use it
    every time we define a text representation; this test checks that it's
    used correctly.
    """
    from django.apps import apps
    models = [model for model in apps.get_models() if 'oscar' in repr(model)]
    invalid_models = []
    for model in models:
        # Use abstract model if it exists
        if 'oscar' in repr(model.__base__):
            model = model.__base__
        dict_ = model.__dict__
        if '__str__' in dict_:
            if six.PY2:
                str_method_module = dict_['__str__'].__module__
                valid = ('django.utils.encoding' == str_method_module
                         and '__unicode__' in dict_)
            else:
                valid = '__unicode__' not in dict_
        else:
            valid = '__unicode__' not in dict_
        if not valid:
            invalid_models.append(model)
    if invalid_models:
        self.fail(
            "Those models don't use the python_2_compatible decorator or "
            "define __unicode__: %s" % invalid_models)
def all_models_of_class(cls):
    """Return all Django models which are subclasses of given class"""
    # During unit tests many of the subclasses we see will be historical models
    # created by the migration system.
    # We only look at subclasses of real Django models in order to exclude them.
    all_models = set(apps.get_models())
    return all_models & _all_subclasses(cls)
def write_base_data(self):
    """
    Exports data that would be duplicated when exporting multiple assessments
    """
    models = apps.get_models()
    base_exports = BaseHawcDataExports()
    self.stdout.write('--- HAWC BASE DATA\n')
    self.stdout.write('------------------------\n')
    self.cursor = connection.cursor()
    for model in models:
        db_table = model._meta.db_table
        if db_table in self.base_tables_handled:
            continue
        if base_exports.lookup(db_table):
            self.base_tables_handled.append(db_table)
            self.stdout.write("\n--- TABLE {}\n".format(db_table))
            qs = None
            qs = base_exports.lookup(db_table)(model, self.id_list)
            if qs is not None:
                self.write_qs_data(qs, model, db_table)
            else:
                self.stdout.write('--- no content added\n')
def write_data(self, assessment_id):
    self.table_handled = list(self.base_tables_handled)
    self.stdout.write('--- HAWC ASSESSMENT DATA\n')
    self.stdout.write('------------------------\n')
    models = apps.get_models()
    for model in models:
        db_table = model._meta.db_table
        if db_table in self.table_handled:
            continue
        self.table_handled.append(db_table)
        self.stdout.write("\n--- TABLE {}\n".format(db_table))
        qs = None
        if hasattr(model.objects, 'assessment_qs'):
            qs = model.objects.assessment_qs(assessment_id)
        elif hasattr(model, 'assessment_qs'):
            qs = model.assessment_qs(assessment_id)
        else:
            print(f'--- {model} not exported\n')
        if qs is not None:
            if qs.count() == 0:
                continue
            self.write_qs_data(qs, model, db_table)
        else:
            self.stdout.write('--- no content added\n')
def handle(self, *args, **options):
    if options.get('model'):
        models = []
        for model_name in options.get('model'):
            models.append(django_apps.get_model(*model_name.split('.')))
    else:
        models = args or django_apps.get_models()
    for model in models:
        if 'historical' not in model._meta.label_lower:
            if hasattr(model, 'update_search_slugs'):
                self.stdout.write(
                    self.style.WARNING('Updating \'{}\' ...'.format(
                        model._meta.label_lower)), ending='\r')
                try:
                    model.objects.update_search_slugs()
                except AttributeError as e:
                    if 'update_search_slugs' in str(e):
                        raise CommandError(
                            'Missing manager method \'update_search_slugs\'. '
                            'See model {}. Got {}'.format(
                                model._meta.label_lower, str(e)))
                    else:
                        raise CommandError(e)
                except Exception as e:
                    raise CommandError(
                        'An exception occurred when updating model {}. '
                        'Got {}'.format(model._meta.label_lower, e))
                else:
                    self.stdout.write(
                        self.style.SUCCESS(
                            'Updating \'{}\' ... Done'.format(
                                model._meta.label_lower)))
        else:
            self.stdout.write(
                '------- {}'.format(model._meta.label_lower))
def handle(self, *args, **options):
    for connection in connections.all():
        with connection.cursor() as cursor:
            for model in apps.get_models():
                if issubclass(model, models.ShardedModel):
                    continue
                self.run(connection, cursor, model, **options)
def handle(self, *args, **options):
    print('Looking for resized fields')
    for Model in apps.get_models():
        print(' {}.{}'.format(Model._meta.app_label, Model._meta.model_name))
        resized_fields = []
        for field in Model._meta.fields:
            if isinstance(field, ResizedImageField):
                resized_fields.append(field)
        for field in resized_fields:
            print('  ', field.name)
            images = (
                Model
                .objects
                .filter(**{'{}__isnull'.format(field.name): False})
                .exclude(**{field.name: ''})
                .values_list(field.name, flat=True)
            )
            for resolution in field.resolutions:
                print('   resizing {} images to {}'.format(
                    len(images),
                    resolution,
                ))
                for image in images:
                    try:
                        resize_image(field.storage.open(image), resolution)
                    except IOError:
                        print('   Image does not exist', image)
                        if options['fails']:
                            raise
    print('Resizing complete')
def getActivity(plantId):
    transactionIDs_qs = Transactions.objects.filter(plants_id=plantId)
    activities = []
    for t in transactionIDs_qs:
        actions = Actions.objects.filter(transactions_id=t.id)
        for a in actions:
            try:
                property_model = next((m for m in apps.get_models() if m._meta.db_table == a.property), None)
                value = property_model.objects.get(id=a.value).value
            except:
                value = a.value
            activity = {
                "activityID": a.id,
                "activityType": a.action_type,
                "activityProperty": a.property,
                "activityValue": value,
                "userID": User.objects.get(id=Transactions.objects.get(id=a.transactions_id).users_id).username,
                "reference": a.reference
            }
            activities.append(activity)
    return activities
def _repopulate_if_necessary(self, models=None):
    if not hasattr(self._store, "queried_models"):
        self._store.queried_models = set()

    if not hasattr(self._store, "constructed_instances"):
        self._store.constructed_instances = {}

    self._update_queries(models)

    if not hasattr(self._store, "content_types"):
        all_models = [
            (x._meta.app_label, x._meta.model_name, x)
            for x in apps.get_models()
        ]
        self._update_queries([(x[0], x[1]) for x in all_models])
        content_types = {}
        for app_label, model_name, model in all_models:
            content_type_id = self._get_id(app_label, model_name)
            content_types[content_type_id] = {
                "id": content_type_id,
                "app_label": app_label,
                "model": model_name,
                "name": smart_text(model._meta.verbose_name_raw)
            }
        self._store.content_types = content_types
def check_material_fields(app_configs, **kwargs):
    """ Custom system check, see: https://docs.djangoproject.com/en/1.9/topics/checks/ """
    errors = []
    if app_configs is None:
        models = apps.get_models()
    else:
        models = chain.from_iterable(app_config.get_models() for app_config in app_configs)

    for model in models:
        if hasattr(model, "collecster_material_fields"):
            for material_field in model.collecster_material_fields:
                try:
                    field = model._meta.get_field(material_field)
                except exceptions.FieldDoesNotExist:
                    errors.append(checks.Error(
                        "'{}' is not a model field, it cannot appear in 'collecster_material_fields'.".format(material_field),
                        hint="Remove this entry from 'collecster_material_fields'.",
                        obj=model,
                        id='Collecster.E010',
                    ))
                    # The field does not exist, so skip the many-to-many check below.
                    continue
                if field.many_to_many:
                    errors.append(checks.Error(
                        "Many-to-many field '{}' cannot appear in 'collecster_material_fields'.".format(field),
                        hint="Make a through model for the field, and have this model implement the check.",
                        obj=model,
                        id='Collecster.E010',
                    ))
    return errors
def get_models(self, options):
    # Load admin classes.
    admin.autodiscover()
    # Get options.
    app_labels = options["app_label"]
    # Parse model classes.
    if len(app_labels) == 0:
        selected_models = apps.get_models()
    else:
        selected_models = set()
        for label in app_labels:
            if "." in label:
                # This is an app.Model specifier.
                try:
                    model = apps.get_model(label)
                except LookupError:
                    raise CommandError("Unknown model: {}".format(label))
                selected_models.add(model)
            else:
                # This is just an app - no model qualifier.
                app_label = label
                try:
                    app = apps.get_app_config(app_label)
                except LookupError:
                    raise CommandError("Unknown app: {}".format(app_label))
                selected_models.update(app.get_models())
    for model in selected_models:
        if is_registered(model):
            yield model
def handle(self, *args, **options):
    models = []
    for model in apps.get_models():
        for member in inspect.getmembers(model):
            if isinstance(member[1], FieldHistoryTracker):
                models.append((model, member[1].fields))
                break
    if models:
        self.stdout.write('Creating initial field history for {} models\n'.format(len(models)))
        for model_fields in models:
            model = model_fields[0]
            fields = model_fields[1]
            for obj in model._default_manager.all():
                for field in list(fields):
                    data = serializers.serialize('json', [obj], fields=[field])
                    FieldHistory.objects.create(
                        object=obj,
                        field_name=field,
                        serialized_data=data,
                    )
    else:
        self.stdout.write('There are no models to create field history for.')
def from_apps(cls, apps):
    "Takes in an Apps and returns a VersionedProjectState matching it"
    app_models = {}
    for model in apps.get_models(include_swapped=True):
        model_state = VersionedModelState.from_model(model)
        app_models[(model_state.app_label, model_state.name.lower())] = model_state
    return cls(app_models)
def type_q(self, klass):
    content_types = ContentType.objects.get_for_models(*[
        model for model in apps.get_models()
        if issubclass(model, klass)
    ]).values()
    return Q(content_type__in=content_types)
def merge_objects(primary_object, alias_objects=None, keep_old=False):
    """
    Use this function to merge model objects (i.e. Users, Organizations, Polls,
    etc.) and migrate all of the related fields from the alias objects to the
    primary object.

    Usage:
        from django.contrib.auth.models import User
        primary_user = User.objects.get(email='*****@*****.**')
        duplicate_user = User.objects.get(email='*****@*****.**')
        merge_objects(primary_user, duplicate_user)
    """
    alias_objects = alias_objects or []
    if not isinstance(alias_objects, list):
        alias_objects = [alias_objects]

    # check that all aliases are the same class as primary one and that
    # they are subclass of model
    primary_class = primary_object.__class__
    if not issubclass(primary_class, Model):
        raise TypeError('Only django.db.models.Model subclasses can be merged')

    for alias_object in alias_objects:
        if not isinstance(alias_object, primary_class):
            pass
            # raise TypeError('Only models of same class can be merged')

    # Get a list of all GenericForeignKeys in all models
    # TODO: this is a bit of a hack, since the generics framework should provide a similar
    # method to the ForeignKey field for accessing the generic related fields.
    generic_fields = []
    for model in apps.get_models():
        for field_name, field in filter(lambda x: isinstance(x[1], GenericForeignKey), model.__dict__.iteritems()):
            generic_fields.append(field)

    blank_local_fields = set([field.attname for field in primary_object._meta.local_fields
                              if getattr(primary_object, field.attname) in [None, '']])

    # Loop through all alias objects and migrate their data to the primary object.
    for alias_object in alias_objects:
        # Migrate all foreign key references from alias object to primary object.
        for related_object in alias_object._meta.get_all_related_objects():
            # The variable name on the alias_object model.
            alias_varname = related_object.get_accessor_name()
            # The variable name on the related model.
            obj_varname = related_object.field.name
            try:
                related_objects = getattr(alias_object, alias_varname)
                for obj in related_objects.all():
                    setattr(obj, obj_varname, primary_object)
                    obj.save()
            except AttributeError as e:
                log.warning('unable to handle "related_objects": {}'.format(e))
                pass

        # Migrate all many to many references from alias object to primary object.
        for related_many_object in alias_object._meta.get_all_related_many_to_many_objects():
            alias_varname = related_many_object.get_accessor_name()
            obj_varname = related_many_object.field.name
            if alias_varname is not None:
                # standard case
                related_many_objects = getattr(alias_object, alias_varname).all()
            else:
                # special case, symmetrical relation, no reverse accessor
                related_many_objects = getattr(alias_object, obj_varname).all()
            for obj in related_many_objects.all():
                getattr(obj, obj_varname).remove(alias_object)
                getattr(obj, obj_varname).add(primary_object)

        # Migrate all generic foreign key references from alias object to primary object.
        for field in generic_fields:
            filter_kwargs = {}
            filter_kwargs[field.fk_field] = alias_object._get_pk_val()
            filter_kwargs[field.ct_field] = field.get_content_type(alias_object)
            for generic_related_object in field.model.objects.filter(**filter_kwargs):
                setattr(generic_related_object, field.name, primary_object)
                try:
                    generic_related_object.save()
                except Exception as e:
                    pass

        # Try to fill all missing values in primary object by values of duplicates
        filled_up = set()
        for field_name in blank_local_fields:
            val = getattr(alias_object, field_name)
            if val not in [None, '']:
                setattr(primary_object, field_name, val)
                filled_up.add(field_name)
        blank_local_fields -= filled_up

        if not keep_old:
            alias_object.delete()

    primary_object.save()
    return primary_object
def get_all_models(cls):
    return [model for model in apps.get_models() if issubclass(model, cls)]
def _get_models_for_connection(self, connection):
    """Return a list of models for a connection."""
    tables = connection.introspection.get_table_list(connection.cursor())
    return [m for m in apps.get_models() if m._meta.db_table in tables]
def ensure_completely_loaded(force=False):
    """
    This method ensures all models are completely loaded

    FeinCMS requires Django to be completely initialized before proceeding,
    because of the extension mechanism and the dynamically created content
    types.

    For more informations, have a look at issue #23 on github:
    http://github.com/feincms/feincms/issues#issue/23
    """
    global COMPLETELY_LOADED
    if COMPLETELY_LOADED and not force:
        return True

    from django.apps import apps
    if not apps.ready:
        return

    # Ensure meta information concerning related fields is up-to-date.
    # Upon accessing the related fields information from Model._meta,
    # the related fields are cached and never refreshed again (because
    # models and model relations are defined upon import time, if you
    # do not fumble around with models like we do in FeinCMS.)
    #
    # Here we flush the caches rather than actually _filling them so
    # that relations defined after all content types registrations
    # don't miss out.
    import django
    from distutils.version import LooseVersion
    if LooseVersion(django.get_version()) < LooseVersion('1.8'):
        for model in apps.get_models():
            for cache_name in ('_field_cache', '_field_name_cache', '_m2m_cache',
                               '_related_objects_cache',
                               '_related_many_to_many_cache', '_name_map'):
                try:
                    delattr(model._meta, cache_name)
                except AttributeError:
                    pass
            # Randomly call some cache filling methods
            # http://goo.gl/XNI2qz
            model._meta._fill_fields_cache()

    # Calls to get_models(...) are cached by the arguments used in the
    # call. This cache is normally cleared in loading.register_models(),
    # but we invalidate the get_models() cache, by calling get_models above
    # before all apps have loaded. (Django's load_app() doesn't clear the
    # get_models cache as it perhaps should). So instead we clear the
    # get_models cache again here. If we don't do this, Django 1.5 chokes
    # on a model validation error (Django 1.4 doesn't exhibit this
    # problem). See Issue #323 on github.
    if hasattr(apps, 'cache'):
        apps.cache.get_models.cache_clear()

    if apps.ready:
        COMPLETELY_LOADED = True
        return True
def add_generic_relations(cls):
    for model in apps.get_models():
        if class_is_indexed(model):
            TextIDGenericRelation(cls).contribute_to_class(model, 'index_entries')
from django.contrib import admin
from django.apps import apps

# Register your models here.
models = apps.get_models()
# admin.site.register(Chowk, Signal)

for model in models:
    try:
        admin.site.register(model)
    except admin.sites.AlreadyRegistered:
        pass
def get_context_data(self, **kwargs):
    m_list = [m._meta for m in apps.get_models()]
    return super().get_context_data(**{**kwargs, 'models': m_list})
def get_context_data(self, **kwargs):
    m_list = [m._meta for m in apps.get_models()]
    kwargs.update({'models': m_list})
    return super(ModelIndexView, self).get_context_data(**kwargs)
def get_django_models():
    return django_apps.get_models()
def api_struct_check(app_configs, **kwargs):
    from rest_models.backend.compiler import get_resource_path  # NOQA
    from rest_models.router import RestModelRouter  # NOQA

    errors = []
    all_models = []
    if app_configs is None:
        all_models.extend(apps.get_models())
    else:
        for app_config in app_configs:
            all_models.extend(app_config.get_models())

    router = RestModelRouter()
    models = ((router.get_api_connexion(model).cursor(), model)
              for model in all_models if router.is_api_model(model))

    for db, rest_model in models:
        url = get_resource_path(rest_model)
        res = db.options(url)
        if res.status_code != 200:
            errors.append(
                Error(
                    'the remote api does not respond to us. OPTIONS %s%s => %s' % (db.url, url, res.status_code),
                    hint='check the url for the remote api or the resource_path',
                    obj=rest_model,
                    id='rest_models.E001'))
            continue

        options = res.json()
        missings = {'include[]', 'exclude[]', 'filter{}', 'page', 'per_page', 'sort[]'} - set(options.get("features", []))
        if missings:
            errors.append(
                Error(
                    'the remote api does not support the folowing features: %s' % missings,
                    hint='is the api on %s/%s running with dynamic-rest ?' % (db.url, url),
                    obj=rest_model,
                    id='rest_models.E002'))
            continue

        for field in rest_model._meta.get_fields():
            if field.is_relation:
                if router.is_api_model(field.related_model):
                    if field.name not in options['properties']:
                        errors.append(
                            Error(
                                'the field %s.%s in not present on the remote serializer' % (rest_model.__name__, field.name),
                                obj="%s.%s" % (rest_model.__name__, field.name),
                                hint='check if the serializer on %s/%s has a field "%s"' % (db.url, url, field.name),
                                id='rest_models.E003'))
                    else:
                        type_is_many = options['properties'][field.name]['type'] == 'many'
                        type_is_one = options['properties'][field.name]['type'] == 'one'
                        if (type_is_many and not (field.one_to_many or field.many_to_many)
                                or type_is_one and not (field.one_to_one or field.many_to_one)):
                            errors.append(
                                Error(
                                    'the field %s.%s many does not match the api' % (rest_model.__name__, field.name),
                                    obj="%s.%s" % (rest_model.__name__, field.name),
                                    hint='check if the serializer at %s%s have a Serializer.many '
                                         'value corresponding to the local model %s' % (db.url, url, field.name),
                                    id='rest_models.E005'))
                        choice_count = len(options['properties'][field.name].get('choices', []))
                        if choice_count > 100:
                            errors.append(
                                Warning(
                                    'the field %s.%s has many choices values (%s) in OPTIONS '
                                    'and it slow down the check' % (rest_model.__name__, field.name, choice_count),
                                    obj="%s.%s" % (rest_model.__name__, field.name),
                                    hint='check if the serializer at %s%s provide '
                                         'a choices with less values for %s' % (db.url, url, field.name),
                                    id='rest_models.W001'))
            elif field.name not in options['properties']:
                errors.append(
                    Error(
                        'the field %s.%s in not present on the remote serializer' % (rest_model.__name__, field.name),
                        hint='check if the serializer on %s%s has a field "%s"' % (db.url, url, field.name),
                        id='rest_models.E006'))
    return errors
def _check_relationship_model(self, from_model=None, **kwargs):
    if hasattr(self.through, '_meta'):
        qualified_model_name = "%s.%s" % (self.through._meta.app_label, self.through.__name__)
    else:
        qualified_model_name = self.through

    errors = []

    if self.through not in apps.get_models(include_auto_created=True):
        # The relationship model is not installed.
        errors.append(
            checks.Error(
                ("Field specifies a many-to-many relation through model "
                 "'%s', which has not been installed.") % qualified_model_name,
                hint=None,
                obj=self,
                id='gm2m.E101',
            ))
    else:
        assert from_model is not None, \
            "GM2MField with intermediate tables cannot be checked if you " \
            "don't pass the model where the field is attached to."

        # Set some useful local variables
        from_model_name = from_model._meta.object_name

        # Count foreign keys in intermediate model
        seen_from = sum(
            from_model == getattr(field.remote_field, 'model', None)
            for field in self.through._meta.fields)

        if seen_from == 0:
            errors.append(
                checks.Error(
                    ("The model is used as an intermediate model by '%s', "
                     "but it does not have a foreign key to '%s' or a "
                     "generic foreign key.") % (self, from_model_name),
                    hint=None,
                    obj=self.through,
                    id='gm2m.E102',
                ))
        elif seen_from > 1 and not self.through_fields:
            errors.append(
                checks.Warning(
                    "The model is used as an intermediate model by "
                    "'%s', but it has more than one foreign key "
                    "from '%s', which is ambiguous. You must specify "
                    "which foreign key Django should use via the "
                    "through_fields keyword argument." % (self, from_model_name),
                    hint=None,
                    obj=self,
                    id='gm2m.E103',
                ))

        seen_to = sum(
            isinstance(field, ct.GenericForeignKey)
            for field in self.through._meta.private_fields)

        if seen_to == 0:
            errors.append(
                checks.Error(
                    "The model is used as an intermediate model by "
                    "'%s', but it does not have a a generic foreign key."
                    % from_model_name,
                    hint=None,
                    obj=self.through,
                    id='gm2m.E104',
                ))
        elif seen_to > 1 and not self.through_fields:
            errors.append(
                checks.Warning(
                    "The model is used as an intermediate model by "
                    "'%s', but it has more than one generic foreign "
                    "key, which is ambiguous. You must specify "
                    "which generic foreign key Django should use via "
                    "the through_fields keyword argument." % self,
                    hint=None,
                    obj=self,
                    id='gm2m.E105',
                ))

    # Validate `through_fields`
    if self.through_fields is not None:
        # Validate that we're given an iterable of at least two items
        # and that none of them is "falsy"
        if not (len(self.through_fields) >= 2
                and self.through_fields[0] and self.through_fields[1]):
            errors.append(
                checks.Error(
                    ("Field specifies 'through_fields' but does not "
                     "provide the names of the two link fields that "
                     "should be used for the relation through model "
                     "'%s'.") % qualified_model_name,
                    hint=("Make sure you specify 'through_fields' as "
                          "through_fields=('field1', 'field2')"),
                    obj=self,
                    id='gm2m.E106',
                ))

        # Validate the given through fields -- they should be actual
        # fields on the through model, and also be foreign keys to the
        # expected models
        else:
            assert from_model is not None, \
                "GM2MField with intermediate tables cannot be checked if you " \
                "don't pass the model where the field is attached to."

            src_field_name = self.through_fields[0]
            through = self.through

            possible_field_names = []
            for f in through._meta.fields:
                if hasattr(f, 'remote_field') \
                        and getattr(f.remote_field, 'model', None) == from_model:
                    possible_field_names.append(f.name)
            if possible_field_names:
                hint = ("Did you mean one of the following foreign keys "
                        "to '%s': %s?") % (from_model._meta.object_name,
                                           ', '.join(possible_field_names))
            else:
                hint = None

            try:
                field = through._meta.get_field(src_field_name)
            except FieldDoesNotExist:
                errors.append(
                    checks.Error(
                        "The intermediary model '%s' has no field '%s'."
                        % (qualified_model_name, src_field_name),
                        hint=hint,
                        obj=self,
                        id='gm2m.E107',
                    ))
            else:
                if not (getattr(field, 'remote_field', None)
                        and getattr(field.remote_field, 'model', None) == from_model):
                    errors.append(
                        checks.Error(
                            "'%s.%s' is not a foreign key to '%s'."
                            % (through._meta.object_name, src_field_name,
                               from_model._meta.object_name),
                            hint=hint,
                            obj=self,
                            id='gm2m.E108',
                        ))

            target_field_name = self.through_fields[1]

            possible_field_names = []
            for f in through._meta.private_fields:
                if isinstance(f, ct.GenericForeignKey):
                    possible_field_names.append(f.name)
            if possible_field_names:
                hint = "Did you mean one of the following generic " \
                       "foreign keys: %s?" % ', '.join(possible_field_names)
            else:
                hint = None

            field = None
            for f in through._meta.private_fields:
                if f.name == target_field_name:
                    field = f
                    break
            else:
                errors.append(
                    checks.Error(
                        "The intermediary model '%s' has no generic "
                        "foreign key named '%s'."
                        % (qualified_model_name, src_field_name),
                        hint=hint,
                        obj=self,
                        id='gm2m.E109',
                    ))

            if field:
                if not isinstance(field, ct.GenericForeignKey):
                    errors.append(
                        checks.Error(
                            "'%s.%s' is not a generic foreign key."
                            % (through._meta.object_name, src_field_name),
                            hint=hint,
                            obj=self,
                            id='gm2m.E110',
                        ))

    return errors
def get_model_from_cls(cls):
    return next(
        (m for m in apps.get_models(include_auto_created=True)
         if m._meta.db_table == cls.__table__.name),
        None,
    )
def get_django_indexable_objects():
    """Return all indexable objects registered in Django"""
    return [
        model for model in apps.get_models()
        if issubclass(model, AbstractESDjangoIndexable)
    ]
from django.apps import apps
from django.contrib import admin

models = apps.get_models('base')
# app = get_app('my_application_name')

for model in models:
    try:
        admin.site.register(model)
    except admin.sites.AlreadyRegistered:
        pass
def plate(self):
    """ Serves up a delicious plate with your models """
    request = self.request
    if self.settings is None:
        graph_settings = deepcopy(getattr(settings, 'SPAGHETTI_SAUCE', {}))
        graph_settings.update(self.override_settings)
    else:
        graph_settings = self.settings

    apps_list = graph_settings.get('apps', [])
    excludes = [
        "%s__%s" % (app, model)
        for app, models in graph_settings.get('exclude', {}).items()
        for model in models
    ]
    models = apps.get_models()
    nodes = []
    edges = []
    for model in models:
        app_label = model._meta.app_label
        model_name = model._meta.model_name
        if (model is None):
            continue
        if app_label not in apps_list:
            continue
        model.is_proxy = model._meta.proxy
        if (model.is_proxy and not graph_settings.get('show_proxy', False)):
            continue
        model.doc = model.__doc__
        _id = "%s__%s" % (app_label, model_name)
        if _id in excludes:
            continue
        label = self.get_node_label(model)
        fields = [f for f in model._meta.fields]
        many = [f for f in model._meta.many_to_many]
        if graph_settings.get('show_fields', True):
            label += "\n%s\n" % ("-" * len(model_name))
            label += "\n".join([str(f.name) for f in fields])
        edge_color = {'inherit': 'from'}
        for f in fields + many:
            if f.remote_field is not None:
                m = f.remote_field.model._meta
                to_id = "%s__%s" % (m.app_label, m.model_name)
                if to_id in excludes:
                    pass
                elif _id == to_id and graph_settings.get('ignore_self_referential', False):
                    pass
                else:
                    if m.app_label != app_label:
                        edge_color = {'inherit': 'both'}
                    edge = {'from': _id, 'to': to_id, 'color': edge_color}
                    if str(f.name).endswith('_ptr'):
                        # fields that end in _ptr are pointing to a parent object
                        edge.update({
                            'arrows': {'to': {'scaleFactor': 0.75}},  # needed to draw from-to
                            'font': {'align': 'middle'},
                            'label': 'is a',
                            'dashes': True
                        })
                    elif type(f) == related.ForeignKey:
                        edge.update({'arrows': {'to': {'scaleFactor': 0.75}}})
                    elif type(f) == related.OneToOneField:
                        edge.update({'font': {'align': 'middle'}, 'label': '|'})
                    elif type(f) == related.ManyToManyField:
                        edge.update({
                            'color': {'color': 'gray'},
                            'arrows': {
                                'to': {'scaleFactor': 1},
                                'from': {'scaleFactor': 1}
                            },
                        })
                    edges.append(edge)
        if model.is_proxy:
            proxy = model._meta.proxy_for_model._meta
            model.proxy = proxy
            edge = {
                'to': _id,
                'from': "%s__%s" % (proxy.app_label, proxy.model_name),
                'color': edge_color,
            }
            edges.append(edge)
        all_node_fields = fields
        if graph_settings.get('show_m2m_field_detail', False):
            all_node_fields = fields + many
        nodes.append({
            'id': _id,
            'label': label,
            'shape': 'box',
            'group': app_label,
            'title': get_template(self.meatball_template_name).render({
                'model': model,
                'fields': all_node_fields
            })
        })
    data = {'meatballs': json.dumps(nodes), 'spaghetti': json.dumps(edges)}
    return render(request, self.plate_template_name, data)
def homepage(request):
    # test try to get app name
    s = apps.get_models()
    print(s)
    # ##
    return render(request, 'homepage/index.html')
def _get_model_by_name(model_name):
    all_models = apps.get_models(include_auto_created=True, include_swapped=True)
    for model in all_models:
        if model_name.lower() == model.__name__.lower():
            return model
def get_models_with_quotas():
    return [
        m for m in apps.get_models()
        if issubclass(m, models.QuotaModelMixin)
    ]
def get_models():
    return [(m, model_name(m)) for m in apps.get_models()]
organization_model = None
unit_model = None
signup_model = None
permissions_by_scope = dict()

# formatters
formatters = dict()

# documentation
last_authenticated_role = None
last_authenticated_username = None

if not initialized:
    initialized = True
    for model in apps.get_models():
        model_name = model.__name__.lower()
        app_label = get_metadata(model, 'app_label')
        add_shortcut = get_metadata(model, 'add_shortcut')
        list_shortcut = get_metadata(model, 'list_shortcut')
        list_diplay = get_metadata(model, 'list_display')
        verbose_name = get_metadata(model, 'verbose_name')
        verbose_name_plural = get_metadata(model, 'verbose_name_plural')
        menu = get_metadata(model, 'menu')
        list_menu = get_metadata(model, 'list_menu')
        dashboard = get_metadata(model, 'dashboard')
        role_signup = get_metadata(model, 'role_signup', False)
        field_names = []
        for field in get_metadata(model, 'get_fields'):
            field_names.append(field.name)
def get_models(cls):
    cls.__protect()
    if cls.models == ALL_MODELS:
        return list(apps.get_models())  # All models known by Django.
    return list(cls.models)
def type_q(self, *types):
    all_subclasses = set(model for model in apps.get_models()
                         if issubclass(model, types))
    content_types = ContentType.objects.get_for_models(*all_subclasses)
    return Q(content_type__in=list(content_types.values()))
def getModel(table_name):
    return next((m for m in apps.get_models() if m._meta.db_table == table_name), None)
def get_all_models(cls):
    from django.apps import apps
    return [model for model in apps.get_models() if issubclass(model, cls)]
    delete_images = [
        current_image
        for fieldname, current_image in zip(image_fields, current_images)
        if getattr(instance, fieldname, None) != current_image
    ]
    services.delete_images(delete_images)
    backup_images_path(instance)


def delete_images_cascaded(sender, instance, **kwargs):
    image_fields = getattr(instance, _IMAGE_FIELDS_ATTR, [])
    images = [getattr(instance, fieldname) for fieldname in image_fields]
    services.delete_images(images)


# Setup signals for all ConfiguredImageFields
for model in apps.get_models():
    for field in model._meta.get_fields(include_parents=False):
        if isinstance(field, ConfiguredImageField):
            image_fields = getattr(model, _IMAGE_FIELDS_ATTR, [])
            if field.attname not in image_fields:
                image_fields.append(field.attname)
            setattr(model, _IMAGE_FIELDS_ATTR, image_fields)
    if hasattr(model, _IMAGE_FIELDS_ATTR):
        post_init.connect(backup_images_path_on_init, sender=model)
        post_save.connect(delete_old_images_on_save, sender=model)
        post_delete.connect(delete_images_cascaded, sender=model)
def test_get_models_only_returns_installed_models(self):
    self.assertNotIn(
        "NotInstalledModel",
        [m.__name__ for m in apps.get_models()])
def check_models_permissions(app_configs=None, **kwargs):
    if app_configs is None:
        models = apps.get_models()
    else:
        models = chain.from_iterable(app_config.get_models() for app_config in app_configs)

    Permission = apps.get_model('auth', 'Permission')
    permission_name_max_length = Permission._meta.get_field('name').max_length
    errors = []

    for model in models:
        opts = model._meta
        builtin_permissions = dict(_get_builtin_permissions(opts))
        # Check builtin permission name length.
        max_builtin_permission_name_length = (
            max(len(name) for name in builtin_permissions.values())
            if builtin_permissions else 0
        )
        if max_builtin_permission_name_length > permission_name_max_length:
            verbose_name_max_length = (
                permission_name_max_length
                - (max_builtin_permission_name_length - len(opts.verbose_name_raw))
            )
            errors.append(
                checks.Error(
                    "The verbose_name of model '%s.%s' must be at most %d characters "
                    "for its builtin permission names to be at most %d characters."
                    % (opts.app_label, opts.object_name, verbose_name_max_length, permission_name_max_length),
                    obj=model,
                    id='auth.E007',
                ))
        codenames = set()
        for codename, name in opts.permissions:
            # Check custom permission name length.
            if len(name) > permission_name_max_length:
                errors.append(
                    checks.Error(
                        "The permission named '%s' of model '%s.%s' is longer than %d characters."
                        % (name, opts.app_label, opts.object_name, permission_name_max_length),
                        obj=model,
                        id='auth.E008',
                    ))
            # Check custom permissions codename clashing.
            if codename in builtin_permissions:
                errors.append(
                    checks.Error(
                        "The permission codenamed '%s' clashes with a builtin permission "
                        "for model '%s.%s'." % (codename, opts.app_label, opts.object_name),
                        obj=model,
                        id='auth.E005',
                    ))
            elif codename in codenames:
                errors.append(
                    checks.Error(
                        "The permission codenamed '%s' is duplicated for model '%s.%s'."
                        % (codename, opts.app_label, opts.object_name),
                        obj=model,
                        id='auth.E006',
                    ))
            codenames.add(codename)
    return errors
def get_indexed_models():
    return [
        model for model in apps.get_models()
        if issubclass(model, Indexed) and not model._meta.abstract
    ]
def deploy_checks(request=None): passed = [] failed = [] # cache something now to see if it's still there further down. randval = random.randint(1, 1000000) cache.set('check_things_cache_test', randval, 60) # Django database try: n = Semester.objects.all().count() if n > 0: passed.append(('Main database connection', 'okay')) else: failed.append(('Main database connection', "Can't find any coredata.Semester objects")) except django.db.utils.OperationalError: failed.append( ('Main database connection', "can't connect to database")) except django.db.utils.ProgrammingError: failed.append(('Main database connection', "database tables missing")) # non-BMP Unicode in database try: l = LogEntry.objects.create(userid='ggbaker', description='Test Unicode \U0001F600', related_object=Semester.objects.first()) except OperationalError: failed.append(('Unicode handling in database', 'non-BMP character not supported by connection')) else: l = LogEntry.objects.get(id=l.id) if '\U0001F600' in l.description: passed.append(('Unicode handling in database', 'okay')) else: failed.append(('Unicode handling in database', 'non-BMP character not stored correctly')) # check that all database tables are utf8mb4, if mysql if settings.DATABASES['default']['ENGINE'].endswith('.mysql'): from django.apps import apps from django.db import connection CORRECT_CHARSET = 'utf8mb4' CORRECT_COLLATION = 'utf8mb4_unicode_ci' db_name = settings.DATABASES['default']['NAME'] with connection.cursor() as cursor: # check database defaults cursor.execute( "SELECT @@character_set_database, @@collation_database;") row = cursor.fetchone() if row != (CORRECT_CHARSET, CORRECT_COLLATION): failed.append(( 'MySQL database charset', 'database default CHARACTER SET and COLLATION incorrect (it is %s): consider "ALTER DATABASE %s CHARACTER SET %s COLLATE %s;"' % (row, db_name, CORRECT_CHARSET, CORRECT_COLLATION))) # check each table table_names = [model._meta.db_table for model in apps.get_models()] # inspect table charset and collations, adapted from https://stackoverflow.com/a/1049958/6871666 cursor.execute( '''SELECT T.table_name, CCSA.character_set_name, CCSA.collation_name FROM information_schema.`TABLES` T, information_schema.`COLLATION_CHARACTER_SET_APPLICABILITY` CCSA WHERE CCSA.collation_name=T.table_collation AND T.table_schema=%s AND T.table_name IN %s ''', (db_name, table_names)) for table, charset, collation in cursor.fetchall(): if (charset, collation) != (CORRECT_CHARSET, CORRECT_COLLATION): failed.append(( 'MySQL database charset', 'table %s has incorrect CHARACTER SET and COLLATION: consider "ALTER TABLE %s CHARACTER SET=%s COLLATE=%s;"' % (table, table, CORRECT_CHARSET, CORRECT_COLLATION))) cursor.execute( '''SELECT table_name, column_name, character_set_name, collation_name FROM information_schema.`COLUMNS` WHERE table_schema=%s AND (character_set_name IS NOT NULL OR collation_name IS NOT NULL) AND (character_set_name!=%s OR collation_name!=%s); ''', (db_name, CORRECT_CHARSET, CORRECT_COLLATION)) for table, column, charset, collation in cursor.fetchall(): failed.append(( 'MySQL database charset', 'table %s has incorrect CHARACTER SET and COLLATION on a column (%s and %s): consider "ALTER TABLE %s CONVERT TO CHARACTER SET %s COLLATE %s;"' % (table, charset, collation, table, CORRECT_CHARSET, CORRECT_COLLATION))) # Celery tasks celery_okay = False sims_task = None try: if settings.USE_CELERY: try: from coredata.tasks import ping except ImportError: failed.append( ('Celery task', "Couldn't import task: probably missing MySQLdb 
module")) else: try: task = ping.apply_async() except kombu.exceptions.OperationalError: failed.append( ('Celery task', 'Kombu error. Probably RabbitMQ not running.')) except amqp.exceptions.AccessRefused: failed.append(( 'Celery task', 'AccessRefused error. Probably bad RabbitMQ auth details.' )) else: from coredata.tasks import check_sims_task sims_task = check_sims_task.apply_async( ) # start here, in case it's slow res = task.get(timeout=5) if res == True: passed.append(('Celery task', 'okay')) celery_okay = True else: failed.append( ('Celery task', 'got incorrect result from task')) else: failed.append(('Celery task', 'celery disabled in settings')) except celery.exceptions.TimeoutError: failed.append( ('Celery task', "didn't get result before timeout: celeryd maybe not running")) except socket.error: failed.append(('Celery task', "can't communicate with broker")) except NotImplementedError: failed.append( ('Celery task', 'celery failed to start with NotImplementedError')) except django.db.utils.ProgrammingError: failed.append(('Celery task', 'celery DB tables missing')) except django.db.utils.OperationalError: failed.append(('Celery task', 'djkombu tables missing: try migrating')) # celery beat if settings.USE_CELERY: try: from coredata.tasks import beat_time_okay if beat_time_okay(): passed.append(('Celery beat', 'okay')) else: failed.append(( 'Celery beat', 'marker file is old: celery beat likely not processing tasks' )) except OSError: failed.append(( 'Celery beat', 'marker file is missing: celery beat likely not processing tasks' )) # Django cache # (has a subprocess do something to make sure we're in a persistent shared cache, not DummyCache) subprocess.call( ['python3', 'manage.py', 'check_things', '--cache_subcall']) cache_okay = False res = cache.get('check_things_cache_test') if res == randval: failed.append(( 'Django cache', 'other processes not sharing cache: dummy/local probably being used instead of memcached' )) elif res is None: failed.append( ('Django cache', 'unable to retrieve anything from cache')) elif res != randval + 1: failed.append(('Django cache', 'unknown result')) else: passed.append(('Django cache', 'okay')) cache_okay = True # Reporting DB connection try: db = SIMSConn() db.execute("SELECT last_name FROM ps_names WHERE emplid=301355288", ()) result = list(db) # whoever this is, they have non-ASCII in their name: let's hope they don't change it. lname = result[0][0] if not isinstance(lname, str): failed.append( ('Reporting DB connection', 'string result not a string: check Unicode decoding')) elif lname[1] != u'\u00e4': failed.append(('Reporting DB connection', 'returned incorrectly-decoded Unicode')) elif len(result) == 0: failed.append(('Reporting DB connection', 'query inexplicably returned nothing')) else: passed.append(('Reporting DB connection', 'okay')) except SIMSProblem as e: failed.append( ('Reporting DB connection', 'SIMSProblem, %s' % (str(e)))) except ImportError: failed.append( ('Reporting DB connection', "couldn't import DB2 module")) except Exception as e: failed.append( ('Reporting DB connection', 'Generic exception, %s' % (str(e)))) if settings.USE_CELERY and sims_task: # sims_task started above, so we can double-up on any wait try: res = sims_task.get(timeout=5) if res: failed.append(('Celery Reporting DB', res)) else: passed.append(('Celery Reporting DB', 'okay')) except celery.exceptions.TimeoutError: failed.append(( 'Celery Reporting DB', "didn't get result before timeout: maybe reporting database is slow?" 
    elif sims_task is None:
        failed.append(('Celery Reporting DB', "didn't check because of Celery failure"))

    # compression enabled?
    if settings.COMPRESS_ENABLED:
        passed.append(('Asset compression enabled', 'okay'))
    else:
        failed.append(('Asset compression enabled', 'disabled in settings'))

    # Haystack searching
    from haystack.query import SearchQuerySet
    try:
        res = SearchQuerySet().filter(text='cmpt')
        if res:
            passed.append(('Haystack search', 'okay'))
        else:
            failed.append(('Haystack search', 'nothing found: maybe update_index, or wait for search server to fully start'))
    except IOError:
        failed.append(('Haystack search', "can't read/write index"))

    # photo fetching
    if cache_okay and celery_okay:
        try:
            res = do_photo_fetch(['301222726'])
            if '301222726' not in res:  # I don't know who 301222726 is, but he/she is real.
                failed.append(('Photo fetching', "didn't find photo we expect to exist"))
            else:
                passed.append(('Photo fetching', 'okay'))
        except (KeyError, Unit.DoesNotExist, django.db.utils.ProgrammingError):
            failed.append(('Photo fetching', 'photo password not set'))
        except urllib.error.HTTPError as e:
            failed.append(('Photo fetching', 'failed to fetch photo (%s). Maybe wrong password?' % (e)))
    else:
        failed.append(('Photo fetching', 'not testing since memcached or celery failed'))

    # emplid/userid API
    emplid = userid_to_emplid('ggbaker')
    if not emplid:
        failed.append(('Emplid API', 'no emplid returned'))
    elif isinstance(emplid, str) and not emplid.startswith('2000'):
        failed.append(('Emplid API', 'incorrect emplid returned'))
    else:
        passed.append(('Emplid API', 'okay'))

    # file creation in the necessary places
    dirs_to_check = [
        (settings.DB_BACKUP_DIR, 'DB backup dir'),
        (settings.SUBMISSION_PATH, 'submitted files path'),
        (os.path.join(settings.COMPRESS_ROOT, 'CACHE'), 'compressed media root'),
    ]
    for directory, label in dirs_to_check:
        res = _check_file_create(directory)
        if res is None:
            passed.append(('File creation in ' + label, 'okay'))
        else:
            failed.append(('File creation in ' + label, res))

    # are any services listening publicly that shouldn't?
    hostname = socket.gethostname()
    ports = [
        25,  # mail server
        #4369,  # epmd, erlang port mapper daemon is okay to listen externally and won't start with ERL_EPMD_ADDRESS set. http://serverfault.com/questions/283913/turn-off-epmd-listening-port-4369-in-ubuntu-rabbitmq
        45130,  # beam? rabbitmq something
        4000,  # main DB stunnel
        50000,  # reporting DB
        8000,  # gunicorn
        11211,  # memcached
        9200, 9300,  # elasticsearch
        8983,  # solr
    ]
    connected = []
    for p in ports:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.connect((hostname, p))
        except socket.error:
            # couldn't connect: good
            pass
        else:
            connected.append(p)
        finally:
            s.close()
    if connected:
        failed.append(('Ports listening externally', 'got connections to port ' + ','.join(str(p) for p in connected)))
    else:
        passed.append(('Ports listening externally', 'okay'))

    # correct serving/redirecting of production domains
    if settings.DEPLOY_MODE == 'production':
        production_host_fails = 0
        for host in settings.SERVE_HOSTS + settings.REDIRECT_HOSTS:
            # check HTTPS serving/redirect
            try:
                url = 'https://' + host + reverse('docs:list_docs')  # must be a URL that doesn't require auth
                resp = requests.get(url, allow_redirects=False, timeout=5)
                if host in settings.SERVE_HOSTS and resp.status_code != 200:
                    failed.append(('HTTPS Serving', 'expected 200 okay, but got %i at %s' % (resp.status_code, url)))
                    production_host_fails += 1
                elif host in settings.REDIRECT_HOSTS and resp.status_code != 301:
                    failed.append(('HTTPS Serving', 'expected 301 redirect, but got %i at %s' % (resp.status_code, url)))
                    production_host_fails += 1
            except requests.exceptions.SSLError:
                failed.append(('HTTPS Serving', 'bad SSL/TLS certificate for %s' % (url,)))
                production_host_fails += 1
            except requests.exceptions.RequestException:
                failed.append(('HTTPS Serving', 'unable to connect to request %s' % (url,)))
                production_host_fails += 1

            # check HTTP redirect
            try:
                url = 'http://' + host + reverse('docs:list_docs')  # must be a URL that doesn't require auth
                resp = requests.get(url, allow_redirects=False, timeout=5)
                if resp.status_code != 301:
                    failed.append(('HTTP Serving', 'expected 301 redirect to https://, but got %i at %s' % (resp.status_code, url)))
                    production_host_fails += 1
            except requests.exceptions.RequestException:
                failed.append(('HTTP Serving', 'unable to connect to request %s' % (url,)))
                production_host_fails += 1

        if production_host_fails == 0:
            passed.append(('HTTPS Serving', 'okay: certs and redirects as expected, but maybe check http://www.digicert.com/help/ or https://www.ssllabs.com/ssltest/'))

    # is the server time close to real-time?
    import ntplib
    try:
        c = ntplib.NTPClient()
        response = c.request('pool.ntp.org')
        if abs(response.offset) > 0.1:
            failed.append(('Server time', 'Time is %g seconds off NTP pool.' % (response.offset,)))
        else:
            passed.append(('Server time', 'okay'))
    except ntplib.NTPException as e:
        failed.append(('Server time', 'Unable to query NTP pool: %s' % (e,)))

    # library sanity
    err = bitfield_check()
    if err:
        failed.append(('Library sanity', 'django-bitfield: ' + err))
    else:
        err = cache_check()
        if err:
            failed.append(('Library sanity', 'django cache: ' + err))
        else:
            passed.append(('Library sanity', 'okay'))

    # github-flavoured markdown
    from courselib.github_markdown import markdown_to_html_rpc, markdown_to_html_subprocess
    md = 'test *markup*\n\n```python\nprint(1)\n```\n\u2605\U0001F600'
    correct = '<p>test <em>markup</em></p>\n<pre lang="python"><code>print(1)\n</code></pre>\n<p>\u2605\U0001F600</p>'
    try:
        # checks that ruby subprocess runs; does github-flavour correctly; does Unicode correctly.
        html = markdown_to_html_subprocess(md, fallback=False)
        if html.strip() == correct:
            passed.append(('Markdown subprocess', 'okay'))
        else:
            failed.append(('Markdown subprocess', 'markdown script returned incorrect markup'))
    except OSError:
        failed.append(('Markdown subprocess', 'failed to start ruby command: ruby package probably not installed'))
    except RuntimeError:
        failed.append(('Markdown subprocess', 'markdown script failed'))

    try:
        # checks that docker RPC runs; does github-flavour correctly; does Unicode correctly.
        html = markdown_to_html_rpc(md, fallback=False)
        if html.strip() == correct:
            passed.append(('Markdown RPC', 'okay'))
        else:
            failed.append(('Markdown RPC', 'markdown script returned incorrect markup'))
    except OSError:
        failed.append(('Markdown RPC', 'unable to connect for RPC: docker container may be down'))
    except AttributeError:
        failed.append(('Markdown RPC', 'unable to connect to RabbitMQ: not configured in settings.py'))

    # MOSS subprocess
    from submission.moss import check_moss_executable
    check_moss_executable(passed, failed)

    # locale is UTF-8 (matters for markdown script calls, the SIMS database connection)
    import locale
    _, encoding = locale.getdefaultlocale()
    if encoding == 'UTF-8':
        passed.append(('Locale encoding', 'okay'))
    else:
        failed.append(('Locale encoding', "is %r; should be 'UTF-8'" % (encoding,)))

    return passed, failed
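# --- Hedged usage sketch (not part of the original code above) ---
# A minimal example of consuming the (passed, failed) pairs returned by
# deploy_checks() from a management command, so the checks can be run from
# the shell or a cron job. The import path "coredata.panel" and the command
# name are assumptions for illustration; adjust them to wherever
# deploy_checks() actually lives in your project.
from django.core.management.base import BaseCommand

from coredata.panel import deploy_checks  # hypothetical module path: adjust as needed


class Command(BaseCommand):
    help = 'Run the deployment sanity checks and report the results.'

    def handle(self, *args, **options):
        passed, failed = deploy_checks()
        for name, message in passed:
            self.stdout.write('PASS  %s: %s' % (name, message))
        for name, message in failed:
            self.stdout.write('FAIL  %s: %s' % (name, message))
        if failed:
            # non-zero exit status so cron/CI notices the failure
            raise SystemExit(1)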