def post(self, request, *args, **kwargs):
    """Resolve the comment target and comment model from the query string,
    then delegate to the parent view's ``post``.

    Expects ``?for=<ModelName>&id=<pk>``: loads the active target instance
    as ``self.for_item`` and the matching ``Comment_<ModelName>`` model as
    ``self.model``.
    """
    # NOTE(review): ``for`` comes straight from the query string, so any
    # model under the ``app`` label can be targeted — confirm upstream
    # validation restricts the allowed names.
    target_model_name = request.GET.get('for')
    target_pk = request.GET.get('id')
    self.for_item = apps.get_model('app.' + target_model_name).objects.get(
        pk=target_pk, is_active=True)
    self.model = apps.get_model('app.Comment_' + target_model_name)
    return super(CommentCreateFormView, self).post(request, *args, **kwargs)
def emit(self, record):
    """Enrich *record* with the request IP and username, persist a UserLog
    row for known users (history capped at 30 entries), then delegate to
    ``FileHandler.emit``.

    ``record.args`` is expected to be a dict that may carry:
      - ``r``: the Django request (source of REMOTE_ADDR / authenticated user)
      - ``uid``: explicit username override
      - ``hide``: when truthy, skip database logging for this record
    """
    record.ip = '0.0.0.0'
    record.username = '******'
    request = record.args.get('r', None)
    if request:
        ip = request.META.get('REMOTE_ADDR', '0.0.0.0')
        try:
            ip_address_validators('both', ip)
        except Exception:
            # BUGFIX: was a bare ``except:`` — keep the safe fallback but
            # stop masking SystemExit/KeyboardInterrupt.
            ip = '0.0.0.0'
        record.ip = ip
        # NOTE(review): ``is_authenticated()`` is the pre-Django-1.10
        # callable form — confirm against the project's Django version.
        if request.user.is_authenticated():
            record.username = request.user.username
    if 'uid' in record.args:
        record.username = record.args['uid']
    hide = record.args.get('hide', False)
    UserProfile = apps.get_model('core', 'UserProfile')
    profile = UserProfile.objects.filter(user__username=record.username).first()
    if profile and not hide:
        # Cap per-user history at 30 rows: evict the oldest before adding.
        manager = profile.user.user_logs
        if manager.count() >= 30:
            manager.order_by('time')[0].delete()
        UserLog = apps.get_model('core', 'UserLog')
        UserLog(user=profile.user, level=record.levelno, ip=record.ip,
                text='%s.%s' % (record.name, record.getMessage())).save()
    super(FileHandler, self).emit(record)
def post_save(self, obj, created=False):
    # Code related to the hack of pre_save method.
    # Rather, this is the continuation of it.
    #
    # ``self._role_points`` (captured in pre_save) maps role ids to points
    # ids; each mapping is validated here and persisted as a RolePoints
    # row of the saved user story ``obj``.
    if self._role_points:
        Points = apps.get_model("projects", "Points")
        RolePoints = apps.get_model("userstories", "RolePoints")
        for role_id, points_id in self._role_points.items():
            # Only computable roles carry points; a missing or malformed
            # role id is reported back to the client as a 400.
            try:
                role_points = RolePoints.objects.get(role__id=role_id,
                                                     user_story_id=obj.pk,
                                                     role__computable=True)
            except (ValueError, RolePoints.DoesNotExist):
                raise exc.BadRequest({
                    "points": _("Invalid role id '{role_id}'").format(role_id=role_id)
                })
            # The referenced Points row must belong to the same project as
            # the user story.
            try:
                role_points.points = Points.objects.get(id=points_id,
                                                        project_id=obj.project_id)
            except (ValueError, Points.DoesNotExist):
                raise exc.BadRequest({
                    "points": _("Invalid points id '{points_id}'").format(points_id=points_id)
                })
            role_points.save()
    super().post_save(obj, created)
def new_session_model(self):
    '''
    Create a Widget or Slide or Playlist session instance corresponding
    to a Widget or Slide or Playlist instance.

    We can specify the app and model name of the session model
    corresponding to this model using a tuple named
    `conf.SessionModelName`. If that is not there, look for a model in
    this app with the same name as this class but with "Session"
    prepended to it.
    '''
    app_label = self._meta.app_label
    try:
        # Optional per-model override: a (app_name, model_name) tuple.
        SessionModelAppName, SessionModelName = getattr(self, conf.SessionModelName)
        SessionModel = apps.get_model(SessionModelAppName or app_label, SessionModelName)
    except (AttributeError, LookupError, TypeError, ValueError):
        # BUGFIX: previously a bare ``except:``. Narrowed to the failures
        # the lookup can actually produce (missing attribute, non-tuple
        # value, wrong arity, unknown model) so unrelated errors are no
        # longer silently swallowed by the fallback.
        SessionModelName = 'Session' + self.class_name
        SessionModel = apps.get_model(app_label, SessionModelName)
    return SessionModel.new(self)
def project_post_save(sender, instance, created, **kwargs):
    """Populate a newly created project with its template's default data."""
    # Only freshly created projects are seeded; imported projects bring
    # their own data.
    if not created:
        return
    if instance._importing:
        return

    # Fall back to the site-wide default template when none was attached.
    template = getattr(instance, "creation_template", None)
    if template is None:
        template_cls = apps.get_model("projects", "ProjectTemplate")
        template = template_cls.objects.get(slug=settings.DEFAULT_PROJECT_TEMPLATE)
    template.apply_to_project(instance)
    instance.save()

    role_cls = apps.get_model("users", "Role")
    try:
        owner_role = instance.roles.get(slug=template.default_owner_role)
    except role_cls.DoesNotExist:
        owner_role = instance.roles.first()

    # Grant the project owner an admin membership in the owner role.
    if owner_role:
        membership_cls = apps.get_model("projects", "Membership")
        membership_cls.objects.create(user=instance.owner,
                                      project=instance,
                                      role=owner_role,
                                      is_admin=True,
                                      email=instance.owner.email)
def task_index_document(self, document_id):
    """Task body: (re)index a single document, retrying when the database
    or the index lock is contended."""
    document_model = apps.get_model(
        app_label='documents', model_name='Document'
    )
    index_model = apps.get_model(
        app_label='document_indexing', model_name='Index'
    )

    try:
        document = document_model.objects.get(pk=document_id)
    except document_model.DoesNotExist:
        # Document was deleted before we could execute; nothing to index.
        return

    try:
        index_model.objects.index_document(document=document)
    except OperationalError as exception:
        logger.warning(
            'Operational error while trying to index document: '
            '%s; %s', document, exception
        )
        raise self.retry(exc=exception)
    except LockError as exception:
        logger.warning(
            'Unable to acquire lock for document %s; %s ', document,
            exception
        )
        raise self.retry(exc=exception)
def emit(self, record):
    """
    Handles an emitted log entry and stores it in the database, optionally
    linking it to the model object `obj`
    """
    obj = record.__dict__.get('obj', None)
    if obj is None or not isinstance(obj, models.Model) or obj.pk is None:
        content_type, object_id = None, None
    else:
        content_type = apps.get_model('contenttypes', 'ContentType').objects.get_for_model(obj)
        object_id = obj.pk
    try:
        log_entry = apps.get_model('instance', 'LogEntry').objects.create(
            level=record.levelname, text=self.format(record),
            content_type=content_type, object_id=object_id
        )
    except ProgrammingError:
        # This can occur if django tries to log something before migrations
        # have created the log table. Make sure that is actually what happened:
        assert 'instance_logentry' not in connection.introspection.table_names()
        # BUGFIX: previously execution fell through to the publish code
        # below and raised NameError on the unbound ``log_entry`` whenever
        # ``content_type`` was set. Nothing was stored, so nothing to publish.
        return

    # Send notice of entries related to any resource. Skip generic log entries
    # that occur in debug mode, like "GET /static/img/favicon/favicon-96x96.png":
    if content_type:
        log_event = {
            'type': 'object_log_line',
            'log_entry': LogEntrySerializer(log_entry).data
        }
        if hasattr(obj, 'event_context'):
            log_event.update(obj.event_context)
        # TODO: Filter out log entries for which the user doesn't have view rights
        # TODO: More targetted events - only emit events for what the user is looking at
        publish_data('log', log_event)
def _model(self):
    # Import legacy accounting entry "models" (templates) and their lines
    # from the old database: wipe ModelEntry/ModelLineEntry, recreate them
    # from the legacy tables, and keep old-id -> new-object maps for later
    # import passes.
    model_mdl = apps.get_model("accounting", "ModelEntry")
    model_mdl.objects.all().delete()
    modelline_mdl = apps.get_model("accounting", "ModelLineEntry")
    modelline_mdl.objects.all().delete()
    self.model_list = {}
    self.modelline_list = {}
    cur_m = self.old_db.open()
    cur_m.execute(
        "SELECT id, journal, designation FROM fr_sdlibre_compta_Model")
    for modelid, journal, designation in cur_m.fetchall():
        self.print_debug("=> model %s", (designation,))
        # ``journal`` is a legacy id resolved through the map built in an
        # earlier pass.
        self.model_list[modelid] = model_mdl.objects.create(
            journal=self.journal_list[journal], designation=designation)
    cur_ml = self.old_db.open()
    cur_ml.execute(
        "SELECT id, model, compte, montant, tiers FROM fr_sdlibre_compta_ModelLigne")
    for modellineid, model, compte, montant, tiers in cur_ml.fetchall():
        self.print_debug("=> model line %d %s", (model, compte))
        self.modelline_list[modellineid] = modelline_mdl.objects.create(
            model=self.model_list[model], code=convert_code(compte),
            amount=montant)
        # Third party is optional; attach and re-save only when present.
        if tiers is not None:
            self.modelline_list[modellineid].third = self.third_list[tiers]
            self.modelline_list[modellineid].save()
def _thirds(self):
    # Import third parties and their account codes from the legacy
    # database: wipe Third/AccountThird, recreate one Third per legacy
    # ``Tiers`` row (mapped in ``self.third_list``) and one AccountThird
    # per non-empty legacy account-code column.
    third_mdl = apps.get_model("accounting", "Third")
    third_mdl.objects.all().delete()
    accountthird_mdl = apps.get_model("accounting", "AccountThird")
    accountthird_mdl.objects.all().delete()
    self.third_list = {}
    cur = self.old_db.open()
    cur.execute(
        "SELECT id,contact,compteFournisseur,compteClient,compteSalarie,compteSocietaire,etat FROM fr_sdlibre_compta_Tiers")
    for thirdid, abstractid, compte_fournisseur, compte_client, compte_salarie, compte_societaire, etat in cur.fetchall():
        self.print_debug(
            "=> Third of %s", (self.abstract_list[abstractid],))
        self.third_list[thirdid] = third_mdl.objects.create(
            contact=self.abstract_list[abstractid], status=etat)
        # The four legacy account-code columns (supplier, client, employee,
        # member) all map to AccountThird rows the same way — handle them
        # in one loop instead of four copy-pasted branches.
        for compte in (compte_fournisseur, compte_client,
                       compte_salarie, compte_societaire):
            if (compte is not None) and (compte != ''):
                accountthird_mdl.objects.create(
                    third=self.third_list[thirdid],
                    code=convert_code(compte))
def expulsio_despres_de_posar(instance):
    # Post-save hook for an expulsion record: notify the collecting
    # teacher, the expelling teacher, and every teacher who has this
    # student in class.
    professor_recull = instance.professor_recull  # teacher who records the incident
    professor_expulsa = instance.professor        # teacher who expels
    Missatge = apps.get_model('missatgeria', 'Missatge')
    # message for the teacher who collects the incident (only when that is
    # not also the expelling teacher):
    if professor_recull != professor_expulsa:
        missatge = HAS_RECOLLIT_EXPULSIO
        tipus_de_missatge = tipusMissatge(missatge)
        msg = Missatge(remitent=professor_recull.getUser(),
                       text_missatge=missatge.format(unicode(instance)),
                       tipus_de_missatge=tipus_de_missatge)
        msg.envia_a_usuari(instance.professor_recull.getUser(), 'PI')
    # message for the expelling teacher:
    missatge = CAL_TRAMITAR_EXPULSIO
    tipus_de_missatge = tipusMissatge(missatge)
    msg = Missatge(remitent=professor_recull.getUser(),
                   text_missatge=missatge.format(unicode(instance)),
                   enllac='/incidencies/editaExpulsio/{0}/'.format(instance.pk),
                   tipus_de_missatge=tipus_de_missatge)
    msg.envia_a_usuari(instance.professor.getUser(), 'VI')
    # message for teachers who have this student in class (except the
    # expelling teacher):
    msg = Missatge(remitent=professor_recull.getUser(),
                   text_missatge=unicode(instance),
                   tipus_de_missatge='INFORMATIVES_DISCIPLINA')
    Professor = apps.get_model('usuaris', 'Professor')
    professors_que_tenen_aquest_alumne_a_classe = Professor.objects.filter(
        horari__impartir__controlassistencia__alumne=instance.alumne
    ).exclude(pk=instance.professor.pk).distinct()
    for professor in professors_que_tenen_aquest_alumne_a_classe:
        # Tutors of the student receive the message with higher importance.
        esTutor = True if professor in instance.alumne.tutorsDeLAlumne() else False
        importancia = 'VI' if esTutor else 'PI'
        msg.envia_a_usuari(professor.getUser(), importancia)
def enroll_user(user, course_key):
    """Activate *user*'s registration, then enroll them in *course_key*."""
    # Activate user
    registration = world.RegistrationFactory(user=user)
    registration.register(user)
    registration.activate()
    # Enroll them in the course
    enrollment_cls = apps.get_model('student', 'CourseEnrollment')
    enrollment_cls.enroll(user, course_key)
def get_stats_for_user(from_user, by_user):
    """Get the user stats"""
    project_ids = get_visible_project_ids(from_user, by_user)

    # Distinct translated role names across the visible memberships.
    role_names = from_user.memberships.filter(
        project__id__in=project_ids).values_list("role__name", flat=True)
    roles = list({_(name) for name in role_names})

    # Other users sharing at least one visible project.
    user_model = apps.get_model('users', 'User')
    total_contacts = (user_model.objects
                      .filter(memberships__project__id__in=project_ids)
                      .exclude(id=from_user.id)
                      .distinct()
                      .count())

    userstory_model = apps.get_model('userstories', 'UserStory')
    total_closed_userstories = userstory_model.objects.filter(
        is_closed=True,
        project__id__in=project_ids,
        assigned_to=from_user).count()

    return {
        'total_num_projects': len(project_ids),
        'roles': roles,
        'total_num_contacts': total_contacts,
        'total_num_closed_userstories': total_closed_userstories,
    }
def create(self, validated_data):
    """Create an observation of the property named in ``validated_data``.

    Converts the incoming value to its internal representation, optionally
    records an implicit 'maintenance finished' observation for ski trails,
    and keeps the UnitLatestObservation rows in sync.
    """
    # Renamed from ``property``, which shadowed the builtin.
    property_id = validated_data['property_id']
    observable_property = models.ObservableProperty.objects.get(id=property_id)
    validated_data['value'] = observable_property.get_internal_value(
        validated_data['value'])
    observation_type = observable_property.observation_type
    ModelClass = apps.get_model(observation_type)
    with transaction.atomic():
        if validated_data['add_maintenance_observation']:
            if validated_data['property_id'] == 'ski_trail_condition':
                # A condition report implies the trail was just maintained;
                # record that as its own observation. (Separate name to
                # avoid clobbering ``observable_property`` above.)
                maintenance_property = models.ObservableProperty.objects.get(
                    id='ski_trail_maintenance')
                MaintenanceModelClass = apps.get_model(
                    maintenance_property.observation_type)
                obj = MaintenanceModelClass.objects.create(
                    unit_id=validated_data['unit_id'],
                    property_id='ski_trail_maintenance',
                    time=validated_data['time'],
                    auth=validated_data['auth'],
                    value=maintenance_property.get_internal_value('maintenance_finished'))
                models.UnitLatestObservation.objects.update_or_create(
                    unit_id=validated_data['unit_id'],
                    property_id='ski_trail_maintenance',
                    defaults={'observation_id': obj.pk})
            # Strip the serializer-only flag before passing the rest to the
            # concrete model. NOTE(review): when the flag is present but
            # falsy it is NOT removed — confirm upstream always pops it.
            del validated_data['add_maintenance_observation']
        obj = ModelClass.objects.create(**validated_data)
        models.UnitLatestObservation.objects.update_or_create(
            unit_id=validated_data['unit_id'],
            property_id=validated_data['property_id'],
            defaults={'observation_id': obj.pk})
    return obj
def move_subscription(remove_users, source_event, source_node, new_event, new_node):
    """Moves subscription from old_node to new_node

    :param remove_users: dictionary of lists of users to remove from the subscription
    :param source_event: A specific guid event <guid>_file_updated
    :param source_node: Instance of Node
    :param new_event: A specific guid event
    :param new_node: Instance of Node
    :return: Returns a NOTIFICATION_TYPES list of removed users without permissions
    """
    NotificationSubscription = apps.get_model('osf.NotificationSubscription')
    OSFUser = apps.get_model('osf.OSFUser')
    if source_node == new_node:
        return
    old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event))
    if not old_sub:
        return
    # CLEANUP: a redundant ``elif old_sub:`` used to guard this — it is
    # always true after the early return above.
    # Re-key the existing subscription onto the new node/event.
    old_sub._id = to_subscription_key(new_node._id, new_event)
    old_sub.event_name = new_event
    old_sub.owner = new_node
    new_sub = old_sub
    new_sub.save()
    # Remove users that don't have permission on the new node.
    for notification_type in constants.NOTIFICATION_TYPES:
        if new_sub:
            for user_id in remove_users[notification_type]:
                related_manager = getattr(new_sub, notification_type, None)
                subscriptions = related_manager.all() if related_manager else []
                # NOTE(review): this compares a user *id* against the
                # related users queryset — confirm guid-in-queryset
                # membership behaves as intended here.
                if user_id in subscriptions:
                    user = OSFUser.load(user_id)
                    new_sub.remove_user_from_subscription(user)
def get_parent_notification_type(node, event, user):
    """
    Given an event on a node (e.g. comment on node 'xyz'), find the user's
    notification type on the parent project for the same event.
    :param obj node: event owner (Node or User object)
    :param str event: notification event (e.g. 'comment_replies')
    :param obj user: modular odm User object
    :return: str notification type (e.g. 'email_transactional')
    """
    AbstractNode = apps.get_model('osf.AbstractNode')
    NotificationSubscription = apps.get_model('osf.NotificationSubscription')

    # Climb the node tree only while there is a readable parent.
    if node and isinstance(node, AbstractNode) and node.parent_node and node.parent_node.has_permission(user, 'read'):
        parent = node.parent_node
        key = to_subscription_key(parent._id, event)
        try:
            subscription = NotificationSubscription.find_one(Q('_id', 'eq', key))
        except NoResultsFound:
            # No subscription at this level — recurse to the grandparent.
            return get_parent_notification_type(parent, event, user)
        for notification_type in constants.NOTIFICATION_TYPES:
            if getattr(subscription, notification_type).filter(id=user.id).exists():
                return notification_type
        else:
            # for/else: the user is in none of this subscription's
            # buckets — keep climbing.
            return get_parent_notification_type(parent, event, user)
    else:
        return None
def get_model(*args):
    """Resolve one or several models, defaulting the app label.

    Accepts ``(model_name)`` or ``(app_name, model_name)``. ``model_name``
    may also be a list/tuple of names, in which case a list of model
    classes is returned.
    """
    # Select app name: a configured MODEL_APP wins for single-argument
    # calls; an explicit two-argument call wins otherwise; else "nextify".
    if hasattr(settings, "MODEL_APP") \
            and settings.MODEL_APP in settings.INSTALLED_APPS \
            and len(args) < 2:
        app_name = settings.MODEL_APP
    elif len(args) == 2:
        app_name = args[0]
    else:
        app_name = "nextify"

    # Django app labels never carry the "apps." package prefix.
    if app_name.startswith("apps."):
        app_name = app_name.replace("apps.", "")

    model_name = args[0] if len(args) < 2 else args[1]
    if isinstance(model_name, (list, tuple)):
        # Idiom: comprehension instead of the manual append loop.
        return [apps.get_model(app_name, name) for name in model_name]
    return apps.get_model(app_name, model_name)
def create_user(self, username, is_staff=False, is_superuser=False,
                is_active=True, permissions=None, create_employee=True):
    """
    This method is used to create users in test cases
    """
    user_model = get_user_model()
    username_field = user_model.USERNAME_FIELD
    field_values = {
        'email': username + '@test.django-bmf.org',
        'is_staff': is_staff,
        'is_active': is_active,
        'is_superuser': is_superuser,
    }
    # Check for special case where email is used as username
    if username_field != 'email':
        field_values[username_field] = username

    new_user = user_model(**field_values)
    # Password mirrors the username-field value for easy test logins.
    new_user.set_password(getattr(new_user, username_field))
    new_user.save()

    # create employee object for user (skipped when the contrib model is absent)
    try:
        apps.get_model(settings.CONTRIB_EMPLOYEE).objects.create(user=new_user)
    except LookupError:
        pass

    if not is_superuser and permissions:
        for codename in permissions:
            new_user.user_permissions.add(Permission.objects.get(codename=codename))

    return new_user
def gen_userfiles(apps, schema_editor):
    # Data migration: convert every WooeyFile into a UserFile, de-duplicating
    # uploaded files by checksum (outputs are never de-duplicated), and
    # repoint Favorite generic relations from WooeyFile to UserFile.
    WooeyFile = apps.get_model('wooey', 'WooeyFile')
    UserFile = apps.get_model('wooey', 'UserFile')
    Favorite = apps.get_model('wooey', 'Favorite')
    update_all_contenttypes()
    ContentType = apps.get_model("contenttypes", "ContentType")
    ctype = ContentType.objects.get(model='wooeyfile')
    new_ctype = ContentType.objects.get(model='userfile')
    import os
    checksums = {}
    to_delete = []
    for obj in WooeyFile.objects.all():
        # we only do this for uploads
        if obj.parameter is None or obj.parameter.parameter.is_output == True:
            file_to_use = obj
        else:
            # Re-use the first file seen with this checksum; later
            # duplicates are queued for deletion at the end.
            checksum = obj.checksum
            file_to_use = checksums.get(checksum, obj)
            if checksum is not None and checksum not in checksums:
                checksums[checksum] = file_to_use
            if file_to_use != obj:
                to_delete.append(obj.pk)
        user_file = UserFile(filename=os.path.split(obj.filepath.name)[1],
                             job=obj.job, parameter=obj.parameter,
                             system_file=file_to_use)
        user_file.save()
        # Move favorites pointing at the old file onto the new UserFile.
        favorites = Favorite.objects.filter(content_type=ctype, object_id=obj.id)
        for favorite in favorites:
            favorite.content_object = user_file
            favorite.content_type = new_ctype
            favorite.save()
    WooeyFile.objects.filter(pk__in=to_delete).delete()
def invalidate_caches_study(sender, instance, **kwargs):
    """Drop the study's own caches plus any endpoint caches tied to its
    enabled data streams."""
    models.Study.delete_caches([instance.id])
    # One (enabled-flag, app label, model name, queryset filter) entry per
    # data stream the study can carry.
    cache_targets = (
        (instance.bioassay, 'animal', 'Endpoint',
         {'animal_group__experiment__study': instance.id}),
        (instance.epi, 'epi', 'Outcome',
         {'study_population__study': instance.id}),
        (instance.in_vitro, 'invitro', 'ivendpoint',
         {'experiment__study_id': instance.id}),
        (instance.epi_meta, 'epimeta', 'MetaResult',
         {'protocol__study': instance.id}),
    )
    for enabled, app_label, model_name, filters in cache_targets:
        if enabled:
            clear_cache(apps.get_model(app_label, model_name), filters)
def disconnect_tasks_close_or_open_us_and_milestone_signals():
    """Detach the Task signal handlers that keep user-story and milestone
    open/closed state in sync."""
    task_model = apps.get_model("tasks", "Task")
    signals.pre_save.disconnect(
        sender=task_model,
        dispatch_uid="cached_prev_task")
    signals.post_save.disconnect(
        sender=task_model,
        dispatch_uid="try_to_close_or_open_us_and_milestone_when_create_or_edit_task")
    signals.post_delete.disconnect(
        sender=task_model,
        dispatch_uid="try_to_close_or_open_us_and_milestone_when_delete_task")
def confirm_data_migration(apps, schema_editor):
    # Sanity checks for the WooeyFile -> UserFile data migration: the
    # checksum de-dup should have collapsed the files to a single
    # WooeyFile shared by two UserFiles, and Favorites should now use the
    # UserFile content type.
    WooeyFile = apps.get_model('wooey', 'WooeyFile')
    UserFile = apps.get_model('wooey', 'UserFile')
    Favorite = apps.get_model('wooey', 'Favorite')
    WooeyJob = apps.get_model('wooey', 'WooeyJob')
    ContentType = apps.get_model("contenttypes", "ContentType")
    userfile_type = ContentType.objects.get(model='userfile')
    # We should have one wooeyfile and 2 userfiles
    wooeyfiles = list(WooeyFile.objects.all())
    userfiles = list(UserFile.objects.all())
    assert len(wooeyfiles) == 1
    assert len(userfiles) == 2
    # Ensure favorites now point at UserFiles as well
    favorite = Favorite.objects.first()
    assert favorite.content_type_id == userfile_type.id
    assert favorite.object_id in [i.pk for i in userfiles]
    # Ensure both wooey jobs point at their respective userfiles
    for wooeyjob in WooeyJob.objects.all():
        # There is one file per wooey job
        assert wooeyjob.userfile_set.first().filename is not None
    # Assert the userfiles all point at the same wooeyfile
    wooeyfile = WooeyFile.objects.first()
    for userfile in UserFile.objects.all():
        assert userfile.system_file_id == wooeyfile.id
def grant_permissions(self, proxy_model):
    """
    Create the default permissions for the just added proxy model
    """
    ContentType = apps.get_model('contenttypes', 'ContentType')
    try:
        Permission = apps.get_model('auth', 'Permission')
    except LookupError:
        return

    ctype = ContentType.objects.get_for_model(proxy_model)
    # (codename, human-readable name) pairs we want for this proxy model.
    wanted_perms = [
        ('{0}_{1}'.format(action, proxy_model._meta.model_name),
         "Can {0} {1}".format(action, proxy_model._meta.verbose_name_raw))
        for action in self.default_permissions
    ]
    # Existing permissions as (content_type pk, codename) pairs.
    existing = set(Permission.objects.filter(
        content_type=ctype,
    ).values_list(
        'content_type', 'codename'
    ))
    # Only create what is missing.
    missing = [
        Permission(codename=codename, name=name, content_type=ctype)
        for codename, name in wanted_perms
        if (ctype.pk, codename) not in existing
    ]
    Permission.objects.bulk_create(missing)
def _categories(self):
    # Import membership categories (age brackets, teams, activities) from
    # the legacy database: wipe each table, recreate its rows from the
    # legacy tables, and keep old-id -> new-object maps for later passes.
    age_mdl = apps.get_model("member", "Age")
    age_mdl.objects.all().delete()
    self.age_list = {}
    team_mdl = apps.get_model("member", "Team")
    team_mdl.objects.all().delete()
    self.team_list = {}
    activity_mdl = apps.get_model("member", "Activity")
    activity_mdl.objects.all().delete()
    self.activity_list = {}
    cur_a = self.old_db.open()
    cur_a.execute(
        "SELECT id,nom,ageMin,ageMax FROM fr_sdlibre_membres_ages")
    for ageid, nom, age_min, age_max in cur_a.fetchall():
        self.print_debug("=> Age:%s", (nom,))
        self.age_list[ageid] = age_mdl.objects.create(
            name=nom, minimum=age_min, maximum=age_max)
    cur_t = self.old_db.open()
    cur_t.execute(
        "SELECT id,nom, description, noactive FROM fr_sdlibre_membres_equipes")
    for teamid, nom, description, noactive in cur_t.fetchall():
        self.print_debug(
            "=> Team:%s", (nom,))
        # Legacy flag column stores 'o' (oui) when the team is inactive.
        self.team_list[teamid] = team_mdl.objects.create(
            name=nom, description=description, unactive=noactive == 'o')
    cur_y = self.old_db.open()
    cur_y.execute(
        "SELECT id,nom, description FROM fr_sdlibre_membres_activite")
    for activityid, nom, description in cur_y.fetchall():
        self.print_debug(
            "=> Activity:%s", (nom,))
        self.activity_list[activityid] = activity_mdl.objects.create(
            name=nom, description=description)
def _calc_account_balance(pk):
    """Recompute one account's balance from its own and its children's
    non-draft transaction items, then propagate the recalculation up to
    every parent account."""
    account_cls = apps.get_model(settings.CONTRIB_ACCOUNT)
    transaction_cls = apps.get_model(settings.CONTRIB_TRANSACTIONITEM)
    account = account_cls.objects.get(pk=pk)

    # Transaction items of all child accounts plus the account itself.
    account_pks = list(account_cls.objects.filter(parents=pk).values_list('pk', flat=True))
    account_pks.append(pk)

    def _total(is_credit):
        # Non-draft items only; a missing aggregate means zero.
        aggregated = transaction_cls.objects.filter(
            account_id__in=account_pks,
            draft=False,
            credit=is_credit,
        ).aggregate(Sum('amount'))
        return aggregated['amount__sum'] or Decimal(0)

    value_credit = _total(True)
    value_debit = _total(False)

    # Sign convention depends on the account's type.
    if account.credit_increase():
        account.balance = value_debit - value_credit
    else:
        account.balance = value_credit - value_debit
    account.save(update_parents=False)

    # Recurse upward so parents reflect the new child balance.
    for parent in account.parents.all():
        _calc_account_balance(parent.pk)
def loopNotice():
    """Broadcast every due game notice to its target servers.

    A notice is skipped when it has not started yet, has already ended, or
    was sent more recently than its resend ``interval`` (seconds). The send
    time is recorded on each notice after dispatch.
    """
    # Hoisted: the model was previously fetched once at the top (into an
    # unused ``allserver`` queryset) and again inside the loop.
    GameServer = apps.get_model('server', 'GameServer')
    now = datetime.datetime.now()
    for notice in GameNotice.objects.all():
        if notice.begin_at is not None and notice.begin_at > now:
            continue
        if notice.end_at is not None and notice.end_at < now:
            continue
        if notice.last_send_at is not None and now < notice.last_send_at + datetime.timedelta(0, notice.interval):
            continue
        servers = GameServer.get_servers(notice.servers)
        payload = dict()
        payload['type'] = 0
        payload['contents'] = notice.content
        pdata = json.dumps(payload, ensure_ascii=False)
        # ``print(x)`` with a single argument behaves the same on
        # Python 2 and 3 (was py2-only ``print pdata``).
        print(pdata)
        for server in servers:
            sendNotice.delay(server, server.ip, server.gm_port, pdata)
        notice.last_send_at = now
        notice.save()
def post(self, request, *args, **kwargs):
    """Receive an uploaded image, attach it to a (possibly new) gallery
    keyed by ``gallery_id``, and return gallery/file metadata as JSON.

    Legacy behaviour preserved: any failure is swallowed, printed, and the
    implicit ``None`` is returned.
    """
    try:
        # Anonymous uploads carry no owner.
        owner = request.user if not request.user.is_anonymous() else None
        image = request.FILES['file']
        gallery_klass = apps.get_model(
            di_settings.CONFIG['DROPIMAGEGALLERY_MODEL'] or 'django_dropimages.DropimagesGallery')
        kwargs_dict = {
            'gallery_identifier': request.GET['gallery_id'],
            di_settings.CONFIG['DROPIMAGEOWNER_FIELD'] or 'owner': owner,
        }
        gallery, _ = gallery_klass.objects.get_or_create(**kwargs_dict)
        image_klass = apps.get_model(
            di_settings.CONFIG['DROPIMAGE_MODEL'] or 'django_dropimages.DropimagesImage')
        kwargs_dict = {
            'dropimages_gallery': gallery,
            'dropimages_original_filename': image._name,
            di_settings.CONFIG['DROPIMAGE_FIELD'] or 'image': image,
            di_settings.CONFIG['DROPIMAGEOWNER_FIELD'] or 'owner': owner,
        }
        image_klass.objects.create(**kwargs_dict)
        return self.render_json_response({
            'gallery_identifier': request.GET['gallery_id'],
            'gallery_pk': gallery.pk,
            'file_name': image._name,
        })
    except Exception as e:
        # BUGFIX: was py2-only ``except Exception, e`` syntax.
        # FIXME: this broad catch hides real errors and silently returns
        # None — consider logging and returning an error response instead.
        print(str(e))
def build_handler(build_id):
    """Run a full lint build for *build_id* and report its status upstream.

    Returns a short human-readable outcome string.
    """
    Build = apps.get_model('interface.Build')
    Result = apps.get_model('interface.Result')

    try:
        build = Build.objects.get(id=build_id)
    except Build.DoesNotExist:
        return 'Invalid build ID'

    auth = build.repo.user.get_auth()
    try:
        build.clone(auth)
        passing = build.lint()
        build.clean_directory()

        # No Result rows means the build was cancelled mid-flight.
        if not Result.objects.filter(build=build).exists():
            build.set_status(auth, Build.CANCELLED)
            return 'Build cancelled'

        build.set_status(auth, Build.SUCCESS if passing else Build.ERROR)
        return 'Build finished'
    except Exception as e:
        print(e)
        build.set_status(auth, Build.CANCELLED)
        return 'Build failed'
def _season(self):
    # Import seasons, their per-season required documents and their
    # periods from the legacy database: wipe the tables, recreate rows,
    # and keep old-id -> new-object maps for later passes.
    season_mdl = apps.get_model("member", "Season")
    season_mdl.objects.all().delete()
    self.season_list = {}
    period_mdl = apps.get_model("member", "Period")
    period_mdl.objects.all().delete()
    self.period_list = {}
    doc_mdl = apps.get_model("member", "Document")
    doc_mdl.objects.all().delete()
    self.doc_list = {}
    cur_s = self.old_db.open()
    cur_s.execute(
        "SELECT id, designation,docNeed,courant FROM fr_sdlibre_membres_saisons")
    for seasonid, designation, doc_need, courant in cur_s.fetchall():
        self.print_debug("=> SEASON %s", (designation,))
        # Legacy flag column stores 'o' (oui) for the current season.
        self.season_list[seasonid] = season_mdl.objects.create(
            designation=designation, iscurrent=courant == 'o')
        # ``docNeed`` is a '|'-separated list of required document names;
        # documents are keyed "seasonid_index" in ``self.doc_list``.
        if doc_need is not None:
            doc_idx = 0
            for doc_item in doc_need.split('|'):
                if doc_item != '':
                    self.doc_list["%d_%d" % (seasonid, doc_idx)] = doc_mdl.objects.create(
                        season=self.season_list[seasonid], name=doc_item)
                    doc_idx += 1
    cur_p = self.old_db.open()
    cur_p.execute(
        "SELECT id, saison,num,begin,end FROM fr_sdlibre_membres_periodSaisons")
    for periodid, saison, num, begin, end in cur_p.fetchall():
        # Skip periods whose season was not imported.
        if saison in self.season_list.keys():
            self.print_debug("=> PERIOD %s %d", (saison, num))
            self.period_list[periodid] = period_mdl.objects.create(
                season=self.season_list[saison], num=num,
                begin_date=begin, end_date=end)
def record_detail_review(request):
    """Render the review page for a single work record.

    The concrete record model and the review template are selected from
    the record's service-item alias and app label; body-exam style records
    always live in the ``ehr`` app.
    """
    # Idiom: the two aliases were previously tested with duplicated
    # ``== ... or == ...`` chains in two places.
    BODY_EXAM_ALIASES = ('body_exam_table', 'physical_examination')
    record_id = int(request.POST.get('record_id'))
    record = WorkRecord.objects.get(id=record_id)
    resident = record.resident
    service_item = record.service_item
    if service_item.service_group:
        item_alias = service_item.service_group.alias
    else:
        item_alias = service_item.alias
    if item_alias in BODY_EXAM_ALIASES:
        model_obj = apps.get_model(app_label='ehr', model_name=record.model_name)
    else:
        model_obj = apps.get_model(app_label=record.app_label,
                                   model_name=record.model_name)
    form = model_obj.objects.get(id=record.item_id)
    if item_alias in BODY_EXAM_ALIASES:
        template = 'ehr/body_exam_review.html'
    elif item_alias == 'constitution_identification':
        template = 'tcm/old_identify_review.html'
    elif record.app_label == 'vaccine' and service_item.alias != 'vaccine_card':
        template = 'vaccine/vaccine_review.html'
    elif record.app_label == 'psychiatric':
        template = 'psychiatric/psy_visit_review_content.html'
    elif record.app_label == 'education':
        template = 'education/activity_table_review.html'
    else:
        template = '%s/%s_review_content.html' % (record.app_label, item_alias)
    return render(request, template, {'form': form, 'resident': resident})
def disconnect_projects_signals():
    """Detach the Project save-related signal handlers."""
    project_model = apps.get_model("projects", "Project")
    signals.post_save.disconnect(
        sender=project_model,
        dispatch_uid='project_post_save')
    signals.pre_save.disconnect(
        sender=project_model,
        dispatch_uid="tags_normalization_projects")
    signals.pre_save.disconnect(
        sender=project_model,
        dispatch_uid="update_project_tags_when_create_or_edit_taggable_item_projects")
import pghistory from django.apps import apps from django.contrib import admin @admin.register(apps.get_model("comments.ABCNotation")) class ABCNotationAdmin(admin.ModelAdmin): order_by = ("created_at",) list_display = ( "uuid", "text", "created_at", "user", ) def get_queryset(self, request): ABCNotation = apps.get_model("comments", "ABCNotation") qs = super().get_queryset(request) qs = qs.filter(format=ABCNotation.FORMAT_ABC_NOTATION) qs = qs.order_by("created_at") return qs def has_add_permission(self, request, obj=None): return False # NOTE: https://github.com/jyveapp/django-pghistory object_history_template = "admin/pghistory_template.html" def history_view(self, request, object_id, extra_context=None):
def to_python(self, value):
    """Coerce an "app_label.ModelName" string into a model class; model
    classes pass through untouched."""
    if not isinstance(value, ModelBase):
        app_label, model_name = value.lower().split('.')
        value = apps.get_model(app_label, model_name)
    return value
def search(self, *args, **kwargs):
    """
    Proxy to queryset's search method for the manager's model and
    any models that subclass from this manager's model if the
    model is abstract.
    """
    if not settings.SEARCH_MODEL_CHOICES:
        # No choices defined - build a list of leaf models (those
        # without subclasses) that inherit from Displayable.
        models = [
            m for m in apps.get_models()
            if issubclass(m, self.model)
        ]
        parents = reduce(ior, [set(m._meta.get_parent_list())
                               for m in models])
        models = [m for m in models if m not in parents]
    elif getattr(self.model._meta, "abstract", False):
        # When we're combining model subclasses for an abstract
        # model (eg Displayable), we only want to use models that
        # are represented by the ``SEARCH_MODEL_CHOICES`` setting.
        # Now this setting won't contain an exact list of models
        # we should use, since it can define superclass models such
        # as ``Page``, so we check the parent class list of each
        # model when determining whether a model falls within the
        # ``SEARCH_MODEL_CHOICES`` setting.
        search_choices = set()
        models = set()
        parents = set()
        errors = []
        for name in settings.SEARCH_MODEL_CHOICES:
            try:
                model = apps.get_model(*name.split(".", 1))
            except LookupError:
                errors.append(name)
            else:
                search_choices.add(model)
        if errors:
            raise ImproperlyConfigured(
                "Could not load the model(s) "
                "%s defined in the 'SEARCH_MODEL_CHOICES' setting."
                % ", ".join(errors))
        for model in apps.get_models():
            # Model is actually a subclass of what we're
            # searching (eg Displayable)
            is_subclass = issubclass(model, self.model)
            # Model satisfies the search choices list - either
            # there are no search choices, model is directly in
            # search choices, or its parent is.
            this_parents = set(model._meta.get_parent_list())
            in_choices = not search_choices or model in search_choices
            in_choices = in_choices or this_parents & search_choices
            if is_subclass and (in_choices or not search_choices):
                # Add to models we'll search. Also maintain a parent
                # set, used below for further refinement of models
                # list to search.
                models.add(model)
                parents.update(this_parents)
        # Strip out any models that are superclasses of models,
        # specifically the Page model which will generally be the
        # superclass for all custom content types, since if we
        # query the Page model as well, we will get duplicate
        # results.
        models -= parents
    else:
        models = [self.model]
    all_results = []
    user = kwargs.pop("for_user", None)
    for model in models:
        # Prefer the published() manager when the model provides one,
        # otherwise fall back to the plain queryset.
        try:
            queryset = model.objects.published(for_user=user)
        except AttributeError:
            queryset = model.objects.get_queryset()
        all_results.extend(
            queryset.search(*args, **kwargs).annotate_scores())
    return sorted(all_results, key=lambda r: r.result_count, reverse=True)
from django.shortcuts import render
from rest_framework import viewsets
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status
from .serializers import GeneraSerializer
from django.apps import apps

# Resolved through the app registry instead of a direct import.
Genus = apps.get_model('orchiddb', 'Genus')


class GeneraView(viewsets.ModelViewSet):
    """Standard CRUD endpoints for Genus."""
    queryset = Genus.objects.all()
    serializer_class = GeneraSerializer


@api_view(['GET'])
def view_genus(request):
    """Return every genus serialized via its ``to_json``.

    BUGFIX: this read-only listing previously answered 201 Created; a GET
    must answer 200 OK.
    """
    genera = Genus.objects.all()
    results = [genus.to_json() for genus in genera]
    return Response(results, status=status.HTTP_200_OK)
def get_model(self, model_or_label):
    """Return *model_or_label* unchanged when it is already a model class,
    otherwise resolve the "app_label.ModelName" string via the registry."""
    if isinstance(model_or_label, ModelBase):
        return model_or_label
    app_label, model_name = model_or_label.lower().split('.')
    return apps.get_model(app_label, model_name)
def test_users_to_notify():
    """Verify get_users_to_notify across notify levels, watchers, permissions,
    inactive users and system users.

    FIX: the final two sections ("inactive user" and "system user") previously
    asserted against a stale ``users`` value computed *before* the watcher was
    added, so those assertions tested nothing. ``users`` is now recomputed
    after each ``watchers.add(...)``.
    """
    project = f.ProjectFactory.create()
    role1 = f.RoleFactory.create(project=project, permissions=['view_issues'])
    role2 = f.RoleFactory.create(project=project, permissions=[])
    member1 = f.MembershipFactory.create(project=project, role=role1)
    member2 = f.MembershipFactory.create(project=project, role=role1)
    member3 = f.MembershipFactory.create(project=project, role=role1)
    member4 = f.MembershipFactory.create(project=project, role=role1)
    member5 = f.MembershipFactory.create(project=project, role=role2)
    inactive_member1 = f.MembershipFactory.create(project=project, role=role1)
    inactive_member1.user.is_active = False
    inactive_member1.user.save()
    system_member1 = f.MembershipFactory.create(project=project, role=role1)
    system_member1.user.is_system = True
    system_member1.user.save()
    issue = f.IssueFactory.create(project=project, owner=member4.user)

    policy_model_cls = apps.get_model("notifications", "NotifyPolicy")
    policy1 = policy_model_cls.objects.get(user=member1.user)
    policy2 = policy_model_cls.objects.get(user=member3.user)
    policy3 = policy_model_cls.objects.get(user=inactive_member1.user)
    policy3.notify_level = NotifyLevel.watch
    policy3.save()
    policy4 = policy_model_cls.objects.get(user=system_member1.user)
    policy4.notify_level = NotifyLevel.watch
    policy4.save()

    history = MagicMock()
    history.owner = member2.user
    history.comment = ""

    # Test basic description modifications
    issue.description = "test1"
    issue.save()
    users = services.get_users_to_notify(issue)
    assert len(users) == 1
    assert tuple(users)[0] == issue.get_owner()

    # Test watch notify level in one member
    policy1.notify_level = NotifyLevel.watch
    policy1.save()
    users = services.get_users_to_notify(issue)
    assert len(users) == 2
    assert users == {member1.user, issue.get_owner()}

    # Test with watchers
    issue.watchers.add(member3.user)
    users = services.get_users_to_notify(issue)
    assert len(users) == 3
    assert users == {member1.user, member3.user, issue.get_owner()}

    # Test with watchers with ignore policy
    policy2.notify_level = NotifyLevel.ignore
    policy2.save()
    issue.watchers.add(member3.user)
    users = services.get_users_to_notify(issue)
    assert len(users) == 2
    assert users == {member1.user, issue.get_owner()}

    # Test with watchers without permissions
    issue.watchers.add(member5.user)
    users = services.get_users_to_notify(issue)
    assert len(users) == 2
    assert users == {member1.user, issue.get_owner()}

    # Test with inactive user - must not be notified even when watching
    issue.watchers.add(inactive_member1.user)
    users = services.get_users_to_notify(issue)
    assert len(users) == 2
    assert users == {member1.user, issue.get_owner()}

    # Test with system user - must not be notified even when watching
    issue.watchers.add(system_member1.user)
    users = services.get_users_to_notify(issue)
    assert len(users) == 2
    assert users == {member1.user, issue.get_owner()}
def get_queryset(self, request): ABCNotation = apps.get_model("comments", "ABCNotation") qs = super().get_queryset(request) qs = qs.filter(format=ABCNotation.FORMAT_ABC_NOTATION) qs = qs.order_by("created_at") return qs
def _copy_checking_settings_to_questionnaire(self, to_questionnaire): checking_settings = (apps.get_model( 'topics', 'TopicCheckingSettings').objects.filter(questionnaire=self)) for s in checking_settings: s.copy_to_questionnaire(to_questionnaire)
def user_orgs(user): # We load the organization model like this due to a circular import with the course # metadata models file Organization = apps.get_model('course_metadata', 'Organization') return Organization.user_organizations(user)
def _copy_checking_questions_to_questionnaire(self, to_questionnaire): checking_question_for_topics = (apps.get_model( 'topics', 'QuestionForTopic').objects.filter( scale_in_topic__topic__questionnaire=self)) for q in checking_question_for_topics: q.copy_to_questionnaire(to_questionnaire)
def _resolve_model(model): """Given an 'app.Model' string or model return a model.""" return apps.get_model(model) if isinstance(model, str) else model
from django.apps import apps from rest_flex_fields import FlexFieldsModelSerializer __all__ = [ 'AttributeValueSerializer', ] AttributeValue = apps.get_model(*'product.AttributeValue'.split()) class AttributeValueSerializer(FlexFieldsModelSerializer): """Serializer for :model:`product.AttributeValue`: `**Fields:**` 01. `attribute` : `ForeignKey` [:model:`product.Attribute`] 02. `id` : `AutoField` 03. `name` : `CharField` 04. `slug` : `SlugField` 05. `sort_order` : `PositiveIntegerField` 06. `value` : `CharField` `**Reverse Fields:**` 01. `translations` : `ForeignKey` [:model:`product.AttributeValueTranslation`] """ class Meta: model = AttributeValue fields = [ # Fields 'attribute', 'id', 'name',
def handle_pre_delete(app_name, model_name, instance_id): __handle_pre_delete( apps.get_model(app_name, model_name).objects.get(id=instance_id))
def _get_q_watchers(obj): obj_type = apps.get_model("contenttypes", "ContentType").objects.get_for_model(obj) return Q(watched__content_type=obj_type, watched__object_id=obj.id)
def get_model(self, model_name): if model_name in ['text', 'video', 'image', 'file']: return apps.get_model(app_label='courses', model_name=model_name) return None
def target_object(self): Model = apps.get_model(app_label='idgo_admin', model_name=self.model) return Model.objects.get(**{self.foreign_field: self.foreign_value})
def update_contributors_async(self, user_id): OSFUser = apps.get_model('osf.OSFUser') user = OSFUser.objects.get(id=user_id) p = Paginator(user.visible_contributor_to.order_by('id'), 100) for page_num in p.page_range: bulk_update_contributors(p.page(page_num).object_list)
def handle_m2m_changed(app_name, model_name, instance_id, action): instance = apps.get_model(app_name, model_name).objects.get(id=instance_id) if action in ('post_add', 'post_remove', 'post_clear'): __handle_save(instance) elif action in ('pre_remove', 'pre_clear'): __handle_pre_delete(instance)
def handle(self, *args, **options):
    """Entry point for the migrate management command.

    Validates options, plans the migrations for the requested targets,
    optionally prints the plan, runs the syncdb phase for unmigrated apps,
    applies the plan, and emits pre/post-migrate signals.
    """
    database = options['database']
    if not options['skip_checks']:
        self.check(databases=[database])

    self.verbosity = options['verbosity']
    self.interactive = options['interactive']

    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    for app_config in apps.get_app_configs():
        if module_has_submodule(app_config.module, "management"):
            import_module('.management', app_config.name)

    # Get the database we're operating from
    connection = connections[database]

    # Hook for backends needing any database preparation
    connection.prepare_database()
    # Work out which apps have migrations and which do not
    executor = MigrationExecutor(connection, self.migration_progress_callback)

    # Raise an error if any migrations are applied before their dependencies.
    executor.loader.check_consistent_history(connection)

    # Before anything else, see if there's conflicting apps and drop out
    # hard if there are any
    conflicts = executor.loader.detect_conflicts()
    if conflicts:
        name_str = "; ".join(
            "%s in %s" % (", ".join(names), app)
            for app, names in conflicts.items()
        )
        raise CommandError(
            "Conflicting migrations detected; multiple leaf nodes in the "
            "migration graph: (%s).\nTo fix them run "
            "'python manage.py makemigrations --merge'" % name_str
        )

    # If they supplied command line arguments, work out what they mean.
    run_syncdb = options['run_syncdb']
    target_app_labels_only = True
    if options['app_label']:
        # Validate app_label.
        app_label = options['app_label']
        try:
            apps.get_app_config(app_label)
        except LookupError as err:
            raise CommandError(str(err))
        if run_syncdb:
            if app_label in executor.loader.migrated_apps:
                raise CommandError("Can't use run_syncdb with app '%s' as it has migrations." % app_label)
        elif app_label not in executor.loader.migrated_apps:
            raise CommandError("App '%s' does not have migrations." % app_label)

    if options['app_label'] and options['migration_name']:
        migration_name = options['migration_name']
        if migration_name == "zero":
            # "zero" unapplies everything for the app.
            targets = [(app_label, None)]
        else:
            try:
                migration = executor.loader.get_migration_by_prefix(app_label, migration_name)
            except AmbiguityError:
                raise CommandError(
                    "More than one migration matches '%s' in app '%s'. "
                    "Please be more specific." % (migration_name, app_label)
                )
            except KeyError:
                raise CommandError("Cannot find a migration matching '%s' from app '%s'." % (
                    migration_name, app_label))
            targets = [(app_label, migration.name)]
        target_app_labels_only = False
    elif options['app_label']:
        targets = [key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label]
    else:
        targets = executor.loader.graph.leaf_nodes()

    plan = executor.migration_plan(targets)
    # --check: exit with an error code when unapplied migrations exist.
    exit_dry = plan and options['check_unapplied']

    if options['plan']:
        self.stdout.write('Planned operations:', self.style.MIGRATE_LABEL)
        if not plan:
            self.stdout.write('  No planned migration operations.')
        for migration, backwards in plan:
            self.stdout.write(str(migration), self.style.MIGRATE_HEADING)
            for operation in migration.operations:
                message, is_error = self.describe_operation(operation, backwards)
                style = self.style.WARNING if is_error else None
                self.stdout.write('    ' + message, style)
        if exit_dry:
            sys.exit(1)
        return
    if exit_dry:
        sys.exit(1)

    # At this point, ignore run_syncdb if there aren't any apps to sync.
    run_syncdb = options['run_syncdb'] and executor.loader.unmigrated_apps
    # Print some useful info
    if self.verbosity >= 1:
        self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:"))
        if run_syncdb:
            if options['app_label']:
                self.stdout.write(
                    self.style.MIGRATE_LABEL("  Synchronize unmigrated app: %s" % app_label)
                )
            else:
                self.stdout.write(
                    self.style.MIGRATE_LABEL("  Synchronize unmigrated apps: ") +
                    (", ".join(sorted(executor.loader.unmigrated_apps)))
                )
        if target_app_labels_only:
            self.stdout.write(
                self.style.MIGRATE_LABEL("  Apply all migrations: ") +
                (", ".join(sorted({a for a, n in targets})) or "(none)")
            )
        else:
            if targets[0][1] is None:
                self.stdout.write(
                    self.style.MIGRATE_LABEL('  Unapply all migrations: ') +
                    str(targets[0][0])
                )
            else:
                self.stdout.write(self.style.MIGRATE_LABEL(
                    "  Target specific migration: ") + "%s, from %s"
                    % (targets[0][1], targets[0][0])
                )

    pre_migrate_state = executor._create_project_state(with_applied_migrations=True)
    pre_migrate_apps = pre_migrate_state.apps
    emit_pre_migrate_signal(
        self.verbosity, self.interactive, connection.alias, apps=pre_migrate_apps, plan=plan,
    )

    # Run the syncdb phase.
    if run_syncdb:
        if self.verbosity >= 1:
            self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:"))
        if options['app_label']:
            self.sync_apps(connection, [app_label])
        else:
            self.sync_apps(connection, executor.loader.unmigrated_apps)

    # Migrate!
    if self.verbosity >= 1:
        self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:"))
    if not plan:
        if self.verbosity >= 1:
            self.stdout.write("  No migrations to apply.")
            # If there's changes that aren't in migrations yet, tell them
            # how to fix it.
            autodetector = MigrationAutodetector(
                executor.loader.project_state(),
                ProjectState.from_apps(apps),
            )
            changes = autodetector.changes(graph=executor.loader.graph)
            if changes:
                self.stdout.write(self.style.NOTICE(
                    "  Your models have changes that are not yet reflected "
                    "in a migration, and so won't be applied."
                ))
                self.stdout.write(self.style.NOTICE(
                    "  Run 'manage.py makemigrations' to make new "
                    "migrations, and then re-run 'manage.py migrate' to "
                    "apply them."
                ))
        fake = False
        fake_initial = False
    else:
        fake = options['fake']
        fake_initial = options['fake_initial']
    post_migrate_state = executor.migrate(
        targets, plan=plan, state=pre_migrate_state.clone(), fake=fake,
        fake_initial=fake_initial,
    )
    # post_migrate signals have access to all models. Ensure that all models
    # are reloaded in case any are delayed.
    post_migrate_state.clear_delayed_apps_cache()
    post_migrate_apps = post_migrate_state.apps

    # Re-render models of real apps to include relationships now that
    # we've got a final state. This wouldn't be necessary if real apps
    # models were rendered with relationships in the first place.
    with post_migrate_apps.bulk_update():
        model_keys = []
        for model_state in post_migrate_apps.real_models:
            model_key = model_state.app_label, model_state.name_lower
            model_keys.append(model_key)
            post_migrate_apps.unregister_model(*model_key)
    post_migrate_apps.render_multiple([
        ModelState.from_model(apps.get_model(*model)) for model in model_keys
    ])

    # Send the post_migrate signal, so individual apps can do whatever they need
    # to do at this point.
    emit_post_migrate_signal(
        self.verbosity, self.interactive, connection.alias, apps=post_migrate_apps, plan=plan,
    )
def update_contenttypes(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, apps=global_apps, **kwargs):
    """
    Creates content types for models in the given app, removing any model
    entries that no longer have a matching model class.

    When ``interactive`` is true, stale content types are only deleted after
    the user confirms with 'yes'; otherwise they are left in place.
    """
    if not app_config.models_module:
        return
    app_label = app_config.label
    try:
        app_config = apps.get_app_config(app_label)
        ContentType = apps.get_model('contenttypes', 'ContentType')
    except LookupError:
        return
    if not router.allow_migrate_model(using, ContentType):
        return
    ContentType.objects.clear_cache()
    # Always clear the global content types cache.
    if apps is not global_apps:
        global_apps.get_model('contenttypes', 'ContentType').objects.clear_cache()
    app_models = {
        model._meta.model_name: model
        for model in app_config.get_models()
    }
    if not app_models:
        return
    # Get all the content types
    content_types = {
        ct.model: ct
        for ct in ContentType.objects.using(using).filter(app_label=app_label)
    }
    # Content types whose model no longer exists.
    to_remove = [
        ct
        for (model_name, ct) in six.iteritems(content_types)
        if model_name not in app_models
    ]
    # Models without a content type row yet.
    cts = [
        ContentType(
            app_label=app_label,
            model=model_name,
        )
        for (model_name, model) in six.iteritems(app_models)
        if model_name not in content_types
    ]
    ContentType.objects.using(using).bulk_create(cts)
    if verbosity >= 2:
        for ct in cts:
            print("Adding content type '%s | %s'" % (ct.app_label, ct.model))
    # Confirm that the content type is stale before deletion.
    using = router.db_for_write(ContentType)
    if to_remove:
        if interactive:
            ct_info = []
            for ct in to_remove:
                ct_info.append('    - Content type for %s.%s' % (ct.app_label, ct.model))
                # Collect every object that cascade-deletion would take with
                # this content type so the user can review them.
                collector = NoFastDeleteCollector(using=using)
                collector.collect([ct])
                for obj_type, objs in collector.data.items():
                    if objs == {ct}:
                        continue
                    ct_info.append('    - %s object%s of type %s.%s:' % (
                        len(objs),
                        's' if len(objs) != 1 else '',
                        obj_type._meta.app_label,
                        obj_type._meta.model_name,
                    ))
            content_type_display = '\n'.join(ct_info)
            print("""Some content types in your database are stale and can be deleted.
Any objects that depend on these content types will then also be deleted.
The content types, and the dependent objects that would be deleted, are:

%s

This list does not include data that might be in your database
outside of Django's models.
Are you sure you want to delete these content types?
If you're unsure, answer 'no'.
""" % content_type_display)
            ok_to_delete = input("Type 'yes' to continue, or 'no' to cancel: ")
        else:
            # Non-interactive runs never delete stale content types.
            ok_to_delete = False
        if ok_to_delete == 'yes':
            for ct in to_remove:
                if verbosity >= 2:
                    print("Deleting stale content type '%s | %s'" % (ct.app_label, ct.model))
                ct.delete()
        else:
            if verbosity >= 2:
                print("Stale content types remain.")
def _FindFirstCompoundMatch(matches): """Finds the first match that has a Compound as a value.""" for m in matches: if isinstance(m.value, apps.get_model('gibbs.Compound')): return m return None
def get_model(app_name, model_name): try: model = apps.get_model(app_name, model_name) return model except: return 'model not found'
def sync(cls):
    """
    Synchronize the Autorisaties for all Applicaties.

    Invoke this method whenever a ZaakType/InformatieObjectType/BesluitType
    is created to set up the appropriate Autorisatie objects. This is best
    called as part of `transaction.on_commit`.
    """
    from .utils import send_applicatie_changed_notification

    qs = cls.objects.select_related("applicatie").prefetch_related(
        "applicatie__autorisaties")

    to_delete = []
    to_keep = []
    to_add = []

    for spec in qs:
        existing_autorisaties = [
            autorisatie
            for autorisatie in spec.applicatie.autorisaties.all()
            if autorisatie.component == spec.component
        ]

        for autorisatie in existing_autorisaties:
            # schedule for deletion if existing objects differ from the spec
            if (autorisatie.max_vertrouwelijkheidaanduiding !=
                    spec.max_vertrouwelijkheidaanduiding):
                to_delete.append(autorisatie)
                continue
            if set(autorisatie.scopes) != set(spec.scopes):
                to_delete.append(autorisatie)
                continue
            to_keep.append(autorisatie)

        TypeModel = apps.get_model(COMPONENT_TO_MODEL[spec.component])
        field = COMPONENT_TO_FIELD[spec.component]
        for obj in TypeModel.objects.all():
            url = build_absolute_url(obj.get_absolute_api_url())
            autorisatie = Autorisatie(
                applicatie=spec.applicatie,
                component=spec.component,
                scopes=spec.scopes,
                max_vertrouwelijkheidaanduiding=spec.max_vertrouwelijkheidaanduiding,
                **{field: url})
            to_add.append(autorisatie)

    # FIX: the queryset was previously built without calling .delete() -
    # querysets are lazy, so the out-of-date Autorisatie rows were never
    # actually removed from the database.
    Autorisatie.objects.filter(
        pk__in=[autorisatie.pk for autorisatie in to_delete]).delete()

    # de-duplicate - whatever is in to_keep should not be added again
    existing_urls = defaultdict(list)
    for autorisatie in to_keep:
        if autorisatie.component not in COMPONENT_TO_FIELD:
            continue
        url = getattr(autorisatie, COMPONENT_TO_FIELD[autorisatie.component])
        existing_urls[autorisatie.component].append(url)

    _to_add = []
    for autorisatie in to_add:
        if autorisatie.component not in COMPONENT_TO_FIELD:
            continue
        url = getattr(autorisatie, COMPONENT_TO_FIELD[autorisatie.component])
        if url in existing_urls[autorisatie.component]:
            continue
        _to_add.append(autorisatie)

    # create the de-duplicated, missing autorisaties
    Autorisatie.objects.bulk_create(_to_add)

    # determine which notifications to send
    changed = {
        autorisatie.applicatie
        for autorisatie in (to_delete + _to_add)
    }
    for applicatie in changed:
        send_applicatie_changed_notification(applicatie)
def ready(self):
    """App-registry hook: wire the metadata app into the rest of the system.

    Registers model attributes/fields, event types, permissions, list
    columns, search fields, menu links and signal handlers. Runs once at
    startup; ordering follows the upstream convention (models first, then
    UI registrations, then signals).
    """
    super(MetadataApp, self).ready()
    from actstream import registry
    from .wizard_steps import WizardStepMetadata  # NOQA

    # External models are resolved via the app registry to avoid imports
    # at module load time.
    Document = apps.get_model(app_label='documents', model_name='Document')
    DocumentPageResult = apps.get_model(
        app_label='documents', model_name='DocumentPageResult')
    DocumentType = apps.get_model(
        app_label='documents', model_name='DocumentType')
    DocumentMetadata = self.get_model(model_name='DocumentMetadata')
    DocumentTypeMetadataType = self.get_model(
        model_name='DocumentTypeMetadataType')
    MetadataType = self.get_model(model_name='MetadataType')

    Document.add_to_class(
        name='metadata_value_of', value=DocumentMetadataHelper.constructor)

    ModelAttribute(
        model=Document, name='metadata_value_of.< metadata type name >',
        description=_('Return the value of a specific document metadata'),
        label=_('Metadata value of'))

    ModelField(
        model=Document, name='metadata__metadata_type__name',
        label=_('Metadata type name'))
    ModelField(
        model=Document, name='metadata__value',
        label=_('Metadata type value'))

    ModelEventType.register(model=Document, event_types=(
        event_document_metadata_added,
        event_document_metadata_edited,
        event_document_metadata_removed,
    ))
    ModelEventType.register(model=MetadataType, event_types=(
        event_document_metadata_added,
        event_document_metadata_edited,
        event_document_metadata_removed,
        event_metadata_type_edited,
        event_metadata_type_relationship,
    ))
    ModelEventType.register(
        model=DocumentType, event_types=(event_metadata_type_relationship,))

    ModelPermission.register(model=Document, permissions=(
        permission_document_metadata_add,
        permission_document_metadata_edit,
        permission_document_metadata_remove,
        permission_document_metadata_view,
    ))
    ModelPermission.register(
        model=MetadataType,
        permissions=(permission_acl_edit, permission_acl_view,
                     permission_events_view,
                     permission_metadata_type_delete,
                     permission_metadata_type_edit,
                     permission_metadata_type_view))

    # Columns shown in document / metadata list views.
    SourceColumn(
        source=Document, label=_('Metadata'),
        func=lambda context: get_metadata_string(context['object']))
    SourceColumn(
        source=DocumentPageResult, label=_('Metadata'),
        func=lambda context: get_metadata_string(context['object'].document))
    SourceColumn(
        attribute='metadata_type', is_identifier=True, is_sortable=True,
        source=DocumentMetadata)
    SourceColumn(attribute='value', is_sortable=True, source=DocumentMetadata)
    SourceColumn(
        attribute='is_required', source=DocumentMetadata,
        widget=TwoStateWidget)
    SourceColumn(
        attribute='label', is_identifier=True, is_sortable=True,
        source=MetadataType)
    SourceColumn(attribute='name', is_sortable=True, source=MetadataType)

    # Make metadata searchable from document and document-page searches.
    document_search.add_model_field(
        field='metadata__metadata_type__name', label=_('Metadata type'))
    document_search.add_model_field(
        field='metadata__value', label=_('Metadata value'))
    document_page_search.add_model_field(
        field='document_version__document__metadata__metadata_type__name',
        label=_('Metadata type'))
    document_page_search.add_model_field(
        field='document_version__document__metadata__value',
        label=_('Metadata value'))

    menu_facet.bind_links(links=(link_metadata_view,), sources=(Document,))
    menu_list_facet.bind_links(
        links=(link_setup_document_type_metadata_types,),
        sources=(DocumentType,))
    menu_multi_item.bind_links(
        links=(link_metadata_multiple_add, link_metadata_multiple_edit,
               link_metadata_multiple_remove), sources=(Document,))
    menu_list_facet.bind_links(links=(
        link_acl_list, link_setup_metadata_type_document_types,
        link_object_event_types_user_subcriptions_list,
        link_events_for_object,
    ), sources=(MetadataType,))
    menu_object.bind_links(
        links=(link_setup_metadata_type_delete,
               link_setup_metadata_type_edit), sources=(MetadataType,))
    menu_secondary.bind_links(
        links=(link_setup_metadata_type_list,
               link_setup_metadata_type_create),
        sources=(MetadataType, 'metadata:setup_metadata_type_list',
                 'metadata:setup_metadata_type_create'))
    menu_setup.bind_links(links=(link_setup_metadata_type_list,))
    menu_secondary.bind_links(
        links=(link_metadata_add, link_metadata_edit, link_metadata_remove),
        sources=('metadata:metadata_add', 'metadata:metadata_edit',
                 'metadata:metadata_remove', 'metadata:metadata_view'))

    post_delete.connect(
        dispatch_uid='metadata_handler_post_document_type_metadata_type_delete',
        receiver=handler_post_document_type_metadata_type_delete,
        sender=DocumentTypeMetadataType)
    post_document_type_change.connect(
        dispatch_uid='metadata_handler_post_document_type_change_metadata',
        receiver=handler_post_document_type_change_metadata,
        sender=Document)
    post_save.connect(
        dispatch_uid='metadata_handler_post_document_type_metadata_type_add',
        receiver=handler_post_document_type_metadata_type_add,
        sender=DocumentTypeMetadataType)

    # Index updating
    post_delete.connect(
        dispatch_uid='metadata_handler_index_document_delete',
        receiver=handler_index_document,
        sender=DocumentMetadata)
    post_save.connect(
        dispatch_uid='metadata_handler_index_document_save',
        receiver=handler_index_document,
        sender=DocumentMetadata)

    registry.register(MetadataType)
    registry.register(DocumentTypeMetadataType)
def resolve(self, context=None, request=None, resolved_object=None):
    """Resolve this link against a template context and/or request.

    Returns a ResolvedLink with its URL and disabled state computed, or
    None when the user lacks the required permissions or the link's
    condition callback fails.
    """
    if not context and not request:
        raise ImproperlyConfigured(
            'Must provide a context or a request in order to resolve the '
            'link.')

    AccessControlList = apps.get_model(
        app_label='acls', model_name='AccessControlList')

    if not context:
        context = RequestContext(request=request)

    if not request:
        # Try to get the request object the faster way and fallback to the
        # slower method.
        try:
            request = context.request
        except AttributeError:
            request = Variable('request').resolve(context)

    current_path = request.META['PATH_INFO']
    current_view_name = resolve(current_path).view_name

    # ACL is tested against the resolved_object or just {{ object }} if not
    if not resolved_object:
        try:
            resolved_object = Variable('object').resolve(context=context)
        except VariableDoesNotExist:
            pass

    # If this link has a required permission check that the user has it
    # too
    if self.permissions:
        if resolved_object:
            # Object-level check via the ACL system.
            try:
                AccessControlList.objects.check_access(
                    obj=resolved_object, permissions=self.permissions,
                    user=request.user)
            except PermissionDenied:
                return None
        else:
            # No object in scope - fall back to a global permission check.
            try:
                Permission.check_user_permissions(
                    permissions=self.permissions, user=request.user)
            except PermissionDenied:
                return None

    # Check to see if link has conditional display function and only
    # display it if the result of the conditional display function is
    # True
    if self.condition:
        if not self.condition(context):
            return None

    resolved_link = ResolvedLink(
        current_view_name=current_view_name, link=self)

    if self.view:
        view_name = Variable('"{}"'.format(self.view))
        if isinstance(self.args, list) or isinstance(self.args, tuple):
            # TODO: Don't check for instance check for iterable in try/except
            # block. This update required changing all 'args' argument in
            # links.py files to be iterables and not just strings.
            args = [Variable(arg) for arg in self.args]
        else:
            args = [Variable(self.args)]

        # If we were passed an instance of the view context object we are
        # resolving, inject it into the context. This help resolve links for
        # object lists.
        if resolved_object:
            context['resolved_object'] = resolved_object

        try:
            kwargs = self.kwargs(context)
        except TypeError:
            # Is not a callable
            kwargs = self.kwargs

        kwargs = {key: Variable(value) for key, value in kwargs.items()}

        # Use Django's exact {% url %} code to resolve the link
        node = URLNode(
            view_name=view_name, args=args, kwargs=kwargs, asvar=None)
        try:
            resolved_link.url = node.render(context)
        except Exception as exception:
            # Resolution failures are logged, not raised - the link simply
            # stays URL-less.
            logger.error(
                'Error resolving link "%s" URL; %s', self.text, exception)
    elif self.url:
        resolved_link.url = self.url

    # This is for links that should be displayed but that are not
    # clickable
    if self.conditional_disable:
        resolved_link.disabled = self.conditional_disable(context)
    else:
        resolved_link.disabled = False

    # Lets a new link keep the same URL query string of the current URL
    if self.keep_query:
        # Sometimes we are required to remove a key from the URL QS
        parsed_url = furl(
            force_str(request.get_full_path() or request.META.get(
                'HTTP_REFERER', reverse(setting_home_view.value))))

        for key in self.remove_from_query:
            try:
                parsed_url.query.remove(key)
            except KeyError:
                pass

        # Use the link's URL but with the previous URL querystring
        new_url = furl(resolved_link.url)
        new_url.args = parsed_url.querystr
        resolved_link.url = new_url.url

    resolved_link.context = context
    return resolved_link
def latest_plan(): Plan = apps.get_model('actions', 'Plan') if Plan.objects.exists(): return Plan.objects.latest() else: return None
def get_document_model(cls): # This method should use "Document" with local app label and get_model function to load the model return apps.get_model("{}.Document".format(cls._meta.app_label))
def handle(self, *args, **options):
    """Runs the management command.

    Rebuilds the watson search index either for the specific models named
    on the command line, or - when none are given - for every model
    registered with the selected (or all) search engines. A full rebuild
    also deletes search entries belonging to stale content types.
    """
    activate(settings.LANGUAGE_CODE)
    verbosity = int(options.get("verbosity", 1))

    # see if we're asked to use a specific search engine
    if options.get('engine'):
        engine_slug = options['engine']
        engine_selected = True
    else:
        engine_slug = "default"
        engine_selected = False

    # work-around for legacy optparser hack in BaseCommand. In Django=1.10 the
    # args are collected in options['apps'], but in earlier versions they are
    # kept in args.
    if len(options['apps']):
        args = options['apps']

    # get the search engine we'll be checking registered models for, may be
    # "default"
    search_engine = get_engine(engine_slug)

    models = []
    for model_name in args:
        try:
            model = apps.get_model(
                *model_name.split("."))  # app label, model name
        except TypeError:
            # were we given only model name without app_name?
            registered_models = search_engine.get_registered_models()
            matching_models = [
                x for x in registered_models if x.__name__ == model_name
            ]
            if len(matching_models) > 1:
                raise CommandError(
                    "Model name \"%s\" is not unique, cannot continue!"
                    % model_name)
            if matching_models:
                model = matching_models[0]
            else:
                model = None
        if model is None or not search_engine.is_registered(model):
            raise CommandError(
                "Model \"%s\" is not registered with django-watson search engine \"%s\"!"
                % (force_text(model_name), force_text(engine_slug)))
        models.append(model)

    refreshed_model_count = 0

    if models:
        # request for (re-)building index for a subset of registered models
        if verbosity >= 3:
            print("Using search engine \"%s\"" % engine_slug)
        for model in models:
            refreshed_model_count += rebuild_index_for_model(
                model, engine_slug, verbosity)
    else:
        # full rebuild (for one or all search engines)
        if engine_selected:
            engine_slugs = [engine_slug]
            if verbosity >= 2:
                # let user know the search engine if they selected one
                print(
                    "Rebuilding models registered with search engine \"%s\""
                    % force_text(engine_slug))
        else:
            # loop through all engines
            engine_slugs = [
                x[0] for x in SearchEngine.get_created_engines()
            ]

        for engine_slug in engine_slugs:
            search_engine = get_engine(engine_slug)
            registered_models = search_engine.get_registered_models()
            # Rebuild the index for all registered models.
            for model in registered_models:
                refreshed_model_count += rebuild_index_for_model(
                    model, engine_slug, verbosity)

            # Clean out any search entries that exist for stale content
            # types. Only do it during full rebuild
            valid_content_types = [
                ContentType.objects.get_for_model(model)
                for model in registered_models
            ]
            stale_entries = SearchEntry.objects.filter(
                engine_slug=engine_slug,
            ).exclude(
                content_type__in=valid_content_types)
            stale_entry_count = stale_entries.count()
            if stale_entry_count > 0:
                stale_entries.delete()
            if verbosity >= 1:
                print(
                    "Deleted {stale_entry_count} stale search entry(s) in {engine_slug!r} search engine."
                    .format(
                        stale_entry_count=stale_entry_count,
                        engine_slug=force_text(engine_slug),
                    ))

    if verbosity == 1:
        print(
            "Refreshed {refreshed_model_count} search entry(s) in {engine_slug!r} search engine."
            .format(
                refreshed_model_count=refreshed_model_count,
                engine_slug=force_text(engine_slug),
            ))
def on_node_updated(node_id, user_id, first_save, saved_fields, request_headers=None): # WARNING: Only perform Read-Only operations in an asynchronous task, until Repeatable Read/Serializable # transactions are implemented in View and Task application layers. AbstractNode = apps.get_model('osf.AbstractNode') node = AbstractNode.load(node_id) if node.is_collection or node.archiving: return need_update = bool(node.SEARCH_UPDATE_FIELDS.intersection(saved_fields)) # due to async nature of call this can issue a search update for a new record (acceptable trade-off) if bool({'spam_status', 'is_deleted'}.intersection(saved_fields)): need_update = True elif not node.is_public and 'is_public' not in saved_fields: need_update = False if need_update: node.update_search() if settings.SHARE_URL: if not settings.SHARE_API_TOKEN: return logger.warning( 'SHARE_API_TOKEN not set. Could not send %s to SHARE.'. format(node)) if node.is_registration: on_registration_updated(node) else: resp = requests.post( '{}api/normalizeddata/'.format(settings.SHARE_URL), json={ 'data': { 'type': 'NormalizedData', 'attributes': { 'tasks': [], 'raw': None, 'data': { '@graph': [{ '@id': '_:123', '@type': 'workidentifier', 'creative_work': { '@id': '_:789', '@type': 'project' }, 'uri': '{}{}/'.format(settings.DOMAIN, node._id), }, { '@id': '_:789', '@type': 'project', 'is_deleted': not node.is_public or node.is_deleted or node.is_spammy, }] } } } }, headers={ 'Authorization': 'Bearer {}'.format(settings.SHARE_API_TOKEN), 'Content-Type': 'application/vnd.api+json' }) logger.debug(resp.content) resp.raise_for_status()
def annotations(self): Annotation = apps.get_model("{}.Annotation".format( self._meta.app_label)) return Annotation.objects.filter( reference__in=self.documents.values("reference"))