class SubmissionRevision(Model):
    """Immutable audit record of a form submission at a point in time.

    Each revision stores a JSON snapshot of the submission data plus a
    human-readable summary of what changed since the previous revision.
    The submission itself is referenced through a generic foreign key so
    any submission model can be tracked.
    """

    # Revision type constants (also stored in the `type` column).
    CREATED = 'created'
    CHANGED = 'changed'
    DELETED = 'deleted'
    TYPES = (
        (CREATED, _('Created')),
        (CHANGED, _('Changed')),
        (DELETED, _('Deleted')),
    )

    type = CharField(max_length=7, choices=TYPES)
    created_at = DateTimeField(auto_now_add=True)
    # Generic FK parts: submission_id is stored as text (str(pk)) so it can
    # reference models with non-integer primary keys.
    submission_ct = ForeignKey('contenttypes.ContentType', on_delete=CASCADE)
    submission_id = TextField()
    submission = GenericForeignKey('submission_ct', 'submission_id')
    # JSON-encoded snapshot of the submission data (see get_data()).
    data = TextField()
    # Newline-separated list of human-readable change descriptions.
    summary = TextField()

    objects = SubmissionRevisionQuerySet.as_manager()

    class Meta:
        ordering = ('-created_at', )

    @staticmethod
    def get_filters_for(submission):
        """Return the lookup kwargs identifying all revisions of `submission`."""
        return {
            'submission_ct': ContentType.objects.get_for_model(submission._meta.model),
            'submission_id': str(submission.pk),
        }

    @classmethod
    def diff_summary(cls, page, data1, data2):
        """Build a newline-separated, translated summary of field changes.

        :param page: object exposing get_data_fields() -> iterable of
            (key, label) pairs; defines which fields are compared.
        :param data1: dict with the previous values.
        :param data2: dict with the new values.
        :return: one line per changed field, joined with '\\n'
            (empty string when nothing changed).
        """
        diff = []
        data_fields = page.get_data_fields()
        # Container values are not printed verbatim in the summary,
        # only reported as set/changed.
        hidden_types = (tuple, list, dict)
        for k, label in data_fields:
            value1 = data1.get(k)
            value2 = data2.get(k)
            # Skip unchanged fields; also treat e.g. None vs '' as "no change".
            if value2 == value1 or not value1 and not value2:
                continue
            is_hidden = (isinstance(value1, hidden_types)
                         or isinstance(value2, hidden_types))
            # Escapes newlines as they are used as separator inside summaries.
            if isinstance(value1, str):
                value1 = value1.replace('\n', r'\n')
            if isinstance(value2, str):
                value2 = value2.replace('\n', r'\n')
            if value2 and not value1:
                diff.append(((_('“%s” set.') % label) if is_hidden
                             else (_('“%s” set to “%s”.')) % (label, value2)))
            elif value1 and not value2:
                diff.append(_('“%s” unset.') % label)
            else:
                diff.append(((_('“%s” changed.') % label) if is_hidden
                             else (_('“%s” changed from “%s” to “%s”.')
                                   % (label, value1, value2))))
        return '\n'.join(diff)

    @classmethod
    def create_from_submission(cls, submission, revision_type):
        """Create and return a revision describing `submission`'s current state.

        For CHANGED revisions the summary is diffed against the latest
        stored revision; if nothing changed, no revision is created and
        None is returned.
        """
        page = submission.form_page
        try:
            previous = cls.objects.for_submission(submission).latest(
                'created_at')
        except cls.DoesNotExist:
            previous_data = {}
        else:
            previous_data = previous.get_data()

        filters = cls.get_filters_for(submission)

        data = submission.get_data(raw=True, add_metadata=False)
        data['status'] = submission.status

        if revision_type == cls.CREATED:
            summary = _('Submission created.')
        elif revision_type == cls.DELETED:
            summary = _('Submission deleted.')
        else:
            summary = cls.diff_summary(page, previous_data, data)

        if not summary:  # Nothing changed.
            return

        filters.update(
            type=revision_type,
            data=json.dumps(data, cls=StreamFormJSONEncoder),
            summary=summary,
        )
        return cls.objects.create(**filters)

    def get_data(self):
        """Decode and return the stored JSON snapshot as a dict."""
        return json.loads(self.data)
class Schedule(TimeStampedModel):
    """A dental appointment with a patient, plus its reminder notification.

    A celery task (`send_message`) is scheduled one day before the
    appointment to remind the patient; its task id and delivery status are
    tracked on this model.
    """

    # Appointment status (Portuguese labels are user-facing).
    STATUS_CHOICES = (
        (0, 'Pendente'),
        (1, 'Confirmado'),
        (2, 'Faltou'),
        (3, 'Cancelou'),
    )
    # Status of the reminder notification task.
    NOTIFICATION_STATUS_CHOICES = ((0, 'Pending'), (1, 'Success'), (2, 'Error'),
                                   (3, 'Expired'), (4, 'Unknown'))

    class Meta:
        verbose_name = 'Agendamento'
        verbose_name_plural = 'Agendamentos'

    # Model Fields
    patient = ForeignKey(Patient, on_delete=CASCADE)
    dentist = ForeignKey(Dentist, on_delete=CASCADE)
    date = DateTimeField('Data')
    duration = IntegerField('Duração')
    notification_status = IntegerField('Status da notificação', default=0)
    notification_task_id = CharField('ID Notificação', max_length=50,
                                     default=None, null=True)
    status = IntegerField('Status do agendamento', choices=STATUS_CHOICES,
                          default=0)

    def get_message(self) -> str:
        """Build the reminder text sent to the patient (in Portuguese)."""
        local_date = self.date.astimezone(settings.TZ)
        now = datetime.now(tz=settings.TZ).date()
        # Compute the day delta once instead of twice.
        delta_days = (local_date.date() - now).days
        if delta_days == 0:
            schedule_date = 'hoje'
        elif delta_days == 1:
            schedule_date = 'amanhã'
        else:
            schedule_date = local_date.strftime("dia %d/%m")
        # NOTE: the template only uses the patient and schedule fields; the
        # previously-passed dentist_prefix/dentist_name kwargs were unused
        # and have been dropped.
        message = "Olá {patient_prefix} {patient_name}, " \
                  "não se esqueça de sua consulta odontológica " \
                  "{schedule_date} às {schedule_time}.".format(
                      patient_prefix=self.patient.get_sex_prefix(),
                      patient_name=self.patient.name,
                      schedule_date=schedule_date,
                      schedule_time=local_date.strftime("%H:%M"))
        return message

    def delete(self, using=None, keep_parents=False):
        """Revoke the pending reminder task before deleting the row."""
        self.revoke_notification()
        return super().delete(using, keep_parents)

    def revoke_notification(self):
        """Cancel the scheduled celery reminder task (no-op if never set)."""
        celery_app.control.revoke(self.notification_task_id)

    def create_notification(self):
        """Schedule (or reschedule) the reminder task for this appointment.

        Appointments already in the past are marked Expired instead of
        being scheduled. Does not save the model; callers must persist
        the updated notification fields.
        """
        if date.today() > self.date.date():
            # Appointment is in the past: mark the notification Expired.
            self.notification_status = self.NOTIFICATION_STATUS_CHOICES[3][0]
        else:
            start_time = settings.MESSAGE_ETA
            end_time = settings.MESSAGE_EXPIRES
            # Send the reminder the day before, within the configured window.
            # Fixed: use settings.TZ consistently (the rest of this class
            # does); the previous bare `TZ` was inconsistent with get_message.
            msg_datetime = self.date.astimezone(settings.TZ).replace(
                **start_time) - timedelta(days=1)
            msg_expires = msg_datetime.replace(**end_time)
            message = send_message.apply_async((self.id, ),
                                               eta=msg_datetime,
                                               expires=msg_expires)
            if self.notification_task_id:
                # Replace any previously scheduled reminder.
                self.revoke_notification()
            self.notification_task_id = message.id
            self.notification_status = self.NOTIFICATION_STATUS_CHOICES[0][0]
def get(self, request, organization):
    """ Fetches alert rules and legacy rules for an organization """
    # Resolve the project scope: explicit ids, "all org projects" (-1),
    # or (default) all projects visible to the requesting user's teams.
    project_ids = self.get_requested_project_ids(request) or None
    if project_ids == {-1}:  # All projects for org:
        project_ids = Project.objects.filter(
            organization=organization).values_list("id", flat=True)
    elif project_ids is None:  # All projects for user
        org_team_list = Team.objects.filter(
            organization=organization).values_list("id", flat=True)
        user_team_list = OrganizationMemberTeam.objects.filter(
            organizationmember__user=request.user,
            team__in=org_team_list).values_list("team", flat=True)
        project_ids = Project.objects.filter(
            teams__in=user_team_list).values_list("id", flat=True)

    # Materialize the project ids here. This helps us to not overwhelm the query planner with
    # overcomplicated subqueries. Previously, this was causing Postgres to use a suboptimal
    # index to filter on.
    project_ids = list(project_ids)

    # Optional ?team= filter; "unassigned" adds rules with no owner.
    teams = request.GET.getlist("team", [])
    team_filter_query = None
    if len(teams) > 0:
        try:
            teams_query, unassigned = parse_team_params(
                request, organization, teams)
        except InvalidParams as err:
            return Response(str(err), status=status.HTTP_400_BAD_REQUEST)
        team_filter_query = Q(
            owner_id__in=teams_query.values_list("actor_id", flat=True))
        if unassigned:
            team_filter_query = team_filter_query | Q(owner_id=None)

    alert_rules = AlertRule.objects.fetch_for_organization(
        organization, project_ids)
    if not features.has("organizations:performance-view", organization):
        # Filter to only error alert rules
        alert_rules = alert_rules.filter(
            snuba_query__dataset=Dataset.Events.value)
    issue_rules = Rule.objects.filter(
        status__in=[RuleStatus.ACTIVE, RuleStatus.INACTIVE],
        project__in=project_ids)

    # Optional ?name= substring filter (alert rules match on `name`,
    # legacy issue rules on `label`).
    name = request.GET.get("name", None)
    if name:
        alert_rules = alert_rules.filter(Q(name__icontains=name))
        issue_rules = issue_rules.filter(Q(label__icontains=name))
    if team_filter_query:
        alert_rules = alert_rules.filter(team_filter_query)
        issue_rules = issue_rules.filter(team_filter_query)

    expand = request.GET.getlist("expand", [])
    if "latestIncident" in expand:
        # Annotate each alert rule with the id of its most recent incident
        # ("-1" when there is none) for the serializer to expand.
        alert_rules = alert_rules.annotate(incident_id=Coalesce(
            Subquery(
                Incident.objects.filter(alert_rule=OuterRef(
                    "pk")).order_by("-date_started").values("id")[:1]),
            Value("-1"),
        ))

    is_asc = request.GET.get("asc", False) == "1"
    sort_key = request.GET.getlist("sort", ["date_added"])
    rule_sort_key = [
        "label" if x == "name" else x for x in sort_key
    ]  # Rule's don't share the same field name for their title/label/name...so we account for that here.
    case_insensitive = sort_key == ["name"]

    if "incident_status" in sort_key:
        # Sortable incident status: latest incident status for alert rules,
        # -1 when an alert rule has no incidents, -2 for legacy issue rules
        # so the two querysets sort consistently.
        alert_rules = alert_rules.annotate(incident_status=Coalesce(
            Subquery(
                Incident.objects.filter(alert_rule=OuterRef(
                    "pk")).order_by("-date_started").values("status")[:1]),
            Value(-1, output_field=IntegerField()),
        ))
        issue_rules = issue_rules.annotate(
            incident_status=Value(-2, output_field=IntegerField()))

    if "date_triggered" in sort_key:
        # Rules that never triggered sort as "very old" via datetime.min.
        far_past_date = Value(make_aware(datetime.min),
                              output_field=DateTimeField())
        alert_rules = alert_rules.annotate(date_triggered=Coalesce(
            Subquery(
                Incident.objects.filter(alert_rule=OuterRef("pk")).
                order_by("-date_started").values("date_started")[:1]),
            far_past_date,
        ), )
        issue_rules = issue_rules.annotate(date_triggered=far_past_date)

    alert_rule_intermediary = CombinedQuerysetIntermediary(
        alert_rules, sort_key)
    rule_intermediary = CombinedQuerysetIntermediary(
        issue_rules, rule_sort_key)

    # Paginate the two heterogeneous querysets as one combined, sorted list.
    return self.paginate(
        request,
        paginator_cls=CombinedQuerysetPaginator,
        on_results=lambda x: serialize(
            x, request.user, CombinedRuleSerializer(expand=expand)),
        default_per_page=25,
        intermediaries=[alert_rule_intermediary, rule_intermediary],
        desc=not is_asc,
        cursor_cls=StringCursor if case_insensitive else Cursor,
        case_insensitive=case_insensitive,
    )
class Showable(Model):
    """A user-owned, displayable item: a named image with a description
    and an automatic creation timestamp."""

    name = CharField(max_length=10, default='!%')
    description = CharField(max_length=140, default='')
    image = ImageField()
    # related_name='+' suppresses the reverse accessor on User.
    user = ForeignKey(User, on_delete=CASCADE, related_name='+')
    # Stamped once, when the row is first inserted.
    date = DateTimeField(auto_now_add=True)
class ChatMessageModel(Model):
    """
    This class represents a chat message. It has a owner (user), timestamp and
    the message body.
    """
    # Sender of the message.
    user = ForeignKey(get_user_model(), on_delete=CASCADE, verbose_name='user',
                      related_name='from_user', db_index=True)
    # Direct recipient; null/blank for room-wide (broadcast) messages.
    recipient = ForeignKey(get_user_model(), on_delete=CASCADE,
                           verbose_name='recipient', related_name='to_user',
                           db_index=True, null=True, blank=True)
    created = DateTimeField(auto_now_add=True, editable=False, db_index=True)
    read_date = DateTimeField(editable=False, null=True, blank=True)
    room = CharField(max_length=150, null=True, blank=True)
    body = TextField('body')
    # When True the message goes to the whole room (staff/operators only).
    broadcast = BooleanField(default=False)

    def __str__(self):
        return str(self.id)

    def characters(self):
        """
        Toy function to count body characters.
        :return: body's char number
        """
        return len(self.body)

    def notify_single_client(self, sender, recipient):
        """
        Inform client there is a new message.

        Sends a 'receive' event over the channel layer to both the sender's
        and the recipient's registered channels (when they exist) for this
        room.
        """
        channel_layer = get_channel_layer()
        sender_channel = UserChannel.objects.filter(user__pk=sender.pk,
                                                    room=self.room).first()
        recipient_channel = UserChannel.objects.filter(user__pk=recipient.pk,
                                                       room=self.room).first()
        notification = {
            'type': 'receive',
            'message': self.id,
            'user_fullname': '{} {}'.format(self.user.first_name,
                                            self.user.last_name),
            'is_operator': chat_operator(self.user, self.room),
            'operator_status': sender_channel.status if sender_channel else True
        }
        # print(notification)
        if sender_channel and sender_channel.channel:
            async_to_sync(channel_layer.send)(sender_channel.channel,
                                              notification)
        if recipient_channel and recipient_channel.channel:
            async_to_sync(channel_layer.send)(recipient_channel.channel,
                                              notification)

    def notify_ws_clients(self):
        """
        Inform client there is a new message.

        Broadcast variant: sends a 'receive_group_message' event to the
        whole room group.
        """
        channel_layer = get_channel_layer()
        sender_channel = UserChannel.objects.filter(user=self.user,
                                                    room=self.room).first()
        notification = {
            'type': 'receive_group_message',
            'message': '{}'.format(self.id),
            'user_fullname': '{} {}'.format(self.user.first_name,
                                            self.user.last_name),
            'is_operator': chat_operator(self.user, self.room),
            'operator_status': sender_channel.status if sender_channel else True
        }
        channel_layer = get_channel_layer()
        async_to_sync(channel_layer.group_send)(self.room, notification)

    def save(self, *args, **kwargs):
        """
        Trims white spaces, saves the message and notifies the recipient via WS
        if the message is new.

        NOTE(review): returns False (instead of saving) when a non-operator
        tries to broadcast — callers relying on Model.save()'s usual None
        return should be aware of this early exit.
        """
        # broadcast only for staff users
        if self.broadcast and not chat_operator(self.user, self.room):
            return False
        # Capture the pre-save pk: falsy means this is a brand-new message,
        # so clients are only notified on creation, not on updates.
        new = self.id
        self.body = self.body.strip()  # Trimming whitespaces from the body
        # Escape text to avoid XSS attack and render hrefs
        self.body = get_text_with_hrefs(strip_tags(self.body))
        super(ChatMessageModel, self).save(*args, **kwargs)
        # Touch the sender's channel so its last_seen timestamp is refreshed.
        channel = UserChannel.objects.filter(user=self.user,
                                             room=self.room).first()
        if channel:
            channel.save(update_fields=['last_seen'])
        if not new:
            if self.broadcast:
                self.notify_ws_clients()
            else:
                # notify sender and recipient
                self.notify_single_client(sender=self.user,
                                          recipient=self.recipient)
                # notify sender
                # self.notify_single_client(recipient=self.user)

    # Meta
    class Meta:
        app_label = 'chat'
        verbose_name = 'message'
        verbose_name_plural = 'messages'
        ordering = ('-created', )
class MonitorQueueElement(Model):
    """Queue entry pairing a sequence with the monitor setting it was queued under."""

    sequence = ForeignKey(Sequence, on_delete=CASCADE)
    # auto_now: refreshed on every save, so this reflects the latest (re-)queue time.
    queued = DateTimeField(auto_now=True)
    monitor_setting = CharField(max_length=1, choices=MONITOR_SETTINGS)
class Unit(Model):
    """An organisational unit: an owner, member users and the
    qualifications relevant to the unit.

    NOTE: this is Python 2 code (``__unicode__``, print statement elsewhere
    in the file) — kept Python 2 compatible.
    """

    name = CharField(_('name'), max_length=64, unique=True, db_index=True)

    # who has the right to see detailed data for this unit?
    owner = ForeignKey(settings.AUTH_USER_MODEL,
                       verbose_name=_('owner'),
                       related_name='owns_unit',
                       null=True, blank=True)

    # which users belong to this unit?
    members = ManyToManyField(settings.AUTH_USER_MODEL,
                              verbose_name=_('members'),
                              related_name='units',
                              blank=True, db_index=True)

    # which qualifications are relevant for this unit?
    qualifications = ManyToManyField(Qualification,
                                     verbose_name=_('qualifications'),
                                     related_name='units',
                                     blank=True, db_index=True)

    date_created = DateTimeField(_('created (date)'), null=False,
                                 db_index=True, auto_now_add=True)
    date_modified = DateTimeField(_('modified (date)'), null=False,
                                  db_index=True, auto_now=True)

    objects = UnitManager()

    def __unicode__(self):
        return self.name

    def per_qualification_statistics(self):
        """Qualification statistics on a unit level.

        :return: a list of (qualification, n_members_meeting_it, n_members)
            tuples, one per qualification attached to this unit.
        """
        # TODO Should be limited to the qualifications belonging to
        # TODO the unit or not?
        stats = []
        n_members = self.members.count()
        for q in self.qualifications.all():
            criteria_ids = q.criteria.values_list('id', flat=True)
            # Fixed: removed a leftover debug `print` of this queryset,
            # which evaluated the whole queryset a second time and wrote
            # to stdout; the queryset is now built once and only counted.
            n_users_met_qualification = self.members \
                .filter(met_criteria__id__in=criteria_ids) \
                .distinct() \
                .count()
            stats.append((q, n_users_met_qualification, n_members))
        return stats

    def per_user_statistics(self):
        # TODO Should be limited to the qualifications belonging to
        # TODO the unit?
        pass

    class Meta:
        verbose_name = _('unit')
        verbose_name_plural = _('units')
class Cuestionarios(Model):
    """A questionnaire application: which questionnaire a user took, and when."""

    # External/business identifier of the questionnaire (not the row pk).
    id_cuestionario = CharField(max_length=200)
    id_usuario = ForeignKey(Usuarios, on_delete=CASCADE)
    # When the questionnaire was applied.
    fecha_aplicacion = DateTimeField()
def with_assigned_at(self, user):
    """Annotate each row with ``assigned_at``: the ``created_at`` of the
    matching ``usernews`` entry for *user*, or NULL when there is none."""
    assignment = Case(
        When(usernews__user=user, then=F("usernews__created_at")),
        default=Value(None),
        output_field=DateTimeField(),
    )
    return self.annotate(assigned_at=assignment)
class Arrange(Model):
    """Case handling record (案件處理).

    * case: the related case
    * title: handling title
    * state: handling state (FSM)
    * content: handling content (rich text)
    * arrange_time: handling time
    * publish_time: publication time
    * update_time: last updated time
    """
    state = FSMField(default=State.DRAFT,
                     verbose_name=_('Arrange State'),
                     choices=State.CHOICES)
    case = ForeignKey('cases.Case', on_delete=CASCADE,
                      related_name='arranges', verbose_name=_('Case'))
    title = CharField(max_length=120, verbose_name=_('Arrange Title'))
    content = RichTextField(verbose_name=_('Content'))
    arrange_time = DateTimeField(null=True, blank=True,
                                 verbose_name=_('Arrange Time'))
    publish_time = DateTimeField(null=True, blank=True,
                                 verbose_name=_('Arrange Publish Time'))
    update_time = DateTimeField(auto_now=True, null=True, blank=True,
                                verbose_name=_('Updated Time'))
    # Manual ordering hint within a case (not used by Meta.ordering).
    order = PositiveIntegerField(null=True, blank=True)

    class Meta:
        verbose_name = _('Arrange')
        verbose_name_plural = _('Arrange')
        ordering = ('-arrange_time', )

    def __str__(self):
        return f'{self.case.number}-{self.title}'

    def format_arrange_time(self, format_='SHORT_DATETIME_FORMAT'):
        """Return arrange_time formatted with a Django date format name."""
        return formats.date_format(self.arrange_time, format_)

    def html_content(self):
        # Content is trusted rich text produced by staff; rendered as-is.
        return mark_safe(self.content)
    html_content.short_description = _('Content')
    html_content.allow_tags = True

    @property
    def published(self):
        return self.state in ['published']

    @property
    def email_content(self):
        """Replace embedded images with plain links (for email bodies)."""
        soup = BeautifulSoup(self.content, features="html.parser")
        for img in soup.find_all('img'):
            link = img['src']
            a = soup.new_tag('a', href=link, style="color:red;")
            a.string = '圖片連結'
            img.replaceWith(a)
        return str(soup)

    ########################################################
    # Transition Conditions
    # These must be defined prior to the actual transitions
    # to be referenced.

    def can_publish(self):
        # Publishable only when the case left draft and a handling time is set.
        return self.case.state != 'draft' and self.arrange_time is not None
    can_publish.hint = '案件為處理中且處理時間不為空才能發布'

    ########################################################
    # Workflow (state) Transitions

    def send(self):
        """Email a progress report for this arrangement via SendGrid."""
        origin = self.case.first_history
        template = SendGridMailTemplate.objects.get(name='進度報告')
        data = {
            'number': self.case.number,
            'username': origin.username,
            'case_title': origin.title,
            'title': self.title,
            'datetime': self.format_arrange_time(),
            'content': self.email_content,
        }
        SendGridMail.objects.create(case=self.case, template=template,
                                    from_email=settings.SERVER_EMAIL,
                                    to_email=self.case.email, data=data)

    @transition(field=state, source=State.DRAFT, target=State.PUBLISHED,
                conditions=[can_publish],
                permission=lambda instance, user: user.has_perm(
                    'cases.change_arrange'),
                custom={'button_name': '發布'})
    def publish(self):
        """Publish the arrangement: notify by email, stamp publish_time."""
        self.send()
        self.publish_time = timezone.now()
class WikiPage(Model):
    """A wiki page identified by name, tracking its last modification time."""

    name = CharField(max_length=50)
    # auto_now: updated on every save.
    last_modified = DateTimeField(auto_now=True)
class UserProfile(CleanSave, Model):
    """A User profile to store MAAS specific methods and fields.

    :ivar user: The related User_.

    .. _User: https://docs.djangoproject.com/
       en/dev/topics/auth/
       #storing-additional-information-about-users
    """

    class Meta(DefaultMeta):
        """Needed for South to recognize this model."""

    objects = UserProfileManager()
    user = OneToOneField(User, on_delete=CASCADE)
    # Set to true when the user has completed the intro page of the Web UI.
    completed_intro = BooleanField(default=False)
    auth_last_check = DateTimeField(null=True, auto_now_add=True)

    def delete(self):
        """Delete the profile together with its user.

        Refuses (raises CannotDeleteUserException) while the user still
        owns static IP addresses, IP ranges or nodes; otherwise removes
        the user's file storage, OAuth consumers and the user itself.
        """
        # check owned resources
        owned_resources = [('staticipaddress', 'static IP address(es)'),
                           ('iprange', 'IP range(s)'),
                           ('node', 'node(s)')]
        messages = []
        for attr, title in owned_resources:
            count = getattr(self.user, attr + '_set').count()
            if count:
                messages.append('{} {}'.format(count, title))
        if messages:
            raise CannotDeleteUserException(
                'User {} cannot be deleted: {} are still allocated'.format(
                    self.user.username, ', '.join(messages)))
        if self.user.filestorage_set.exists():
            self.user.filestorage_set.all().delete()
        self.user.consumers.all().delete()
        # Deleting the user first; the profile row itself goes last.
        self.user.delete()
        super(UserProfile, self).delete()

    def transfer_resources(self, new_owner):
        """Transfer owned resources to another user.

        Nodes, static IP addresses and IP ranges owned by the user are
        transfered to the new owner.

        :param new_owner: the UserProfile to transfer ownership to.
        :type new_owner: maasserver.models.UserProfile
        :raises ValidationError: if the new owner lacks access to a
            resource pool containing any of the user's nodes.
        """
        user_pools = ResourcePool.objects.get_user_resource_pools(new_owner)
        nodes = self.user.node_set
        if nodes.exists() and nodes.exclude(pool__in=user_pools).exists():
            raise ValidationError(
                "Can't transfer machines to new user,"
                " user missing access target resource pool(s)")

        self.user.node_set.update(owner=new_owner)
        self.user.staticipaddress_set.update(user=new_owner)
        self.user.iprange_set.update(user=new_owner)

    def get_authorisation_tokens(self):
        """Fetches all the user's OAuth tokens.

        :return: A QuerySet of the tokens.
        :rtype: django.db.models.query.QuerySet_

        .. _django.db.models.query.QuerySet: https://docs.djangoproject.com/
           en/dev/ref/models/querysets/
        """
        # Avoid circular imports.
        from maasserver.models.user import get_auth_tokens

        return get_auth_tokens(self.user)

    def create_authorisation_token(self, consumer_name=None):
        """Create a new Token and its related Consumer (OAuth authorisation).

        :return: A tuple containing the Consumer and the Token that were
            created.
        :rtype: tuple
        """
        # Avoid circular imports.
        from maasserver.models.user import create_auth_token

        token = create_auth_token(self.user, consumer_name)
        return token.consumer, token

    def delete_authorisation_token(self, token_key):
        """Delete the user's OAuth token wich key token_key.

        :param token_key: The key of the token to be deleted.
        :type token_key: string
        :raises: `django.http.Http404`
        """
        token = get_object_or_404(Token, user=self.user,
                                  token_type=Token.ACCESS, key=token_key)
        token.consumer.delete()
        token.delete()

    def modify_consumer_name(self, token_key, consumer_name):
        """Modify consumer name of an existing token key.

        :param token_key: The key of the token to be deleted.
        :type token_key: string
        :param consumer_name: Name of the token consumer.
        :type consumer_name: string
        :raises: `django.http.Http404`
        """
        token = get_object_or_404(Token, user=self.user,
                                  token_type=Token.ACCESS, key=token_key)
        token.consumer.name = consumer_name
        token.consumer.save()
        token.save()

    def __str__(self):
        return self.user.username
class CremeUser(AbstractBaseUser):
    """Creme's user model.

    A row can represent either a real user or a *team* (is_team=True);
    teams group users through ``teammates_set`` and cannot log in.
    Permission checks delegate to the user's ``role`` (UserRole) unless
    the user is a superuser.
    """

    # NB: auth.models.AbstractUser.username max_length == 150 (since django 1.10) => increase too ?
    username = CharField(_('Username'), max_length=30, unique=True,
                         help_text=_('Required. 30 characters or fewer. '
                                     'Letters, digits and @/./+/-/_ only.'
                                    ),
                         validators=[RegexValidator(re_compile(r'^[\w.@+-]+$'),
                                                    _('Enter a valid username. '
                                                      'This value may contain only letters, numbers, '
                                                      'and @/./+/-/_ characters.'),
                                                    'invalid',
                                                   ),
                                    ],
                         error_messages={
                             'unique': _('A user with that username already exists.'),
                         },
                        )
    last_name = CharField(_('Last name'), max_length=100, blank=True)
    first_name = CharField(_('First name'), max_length=100, blank=True)\
                          .set_tags(viewable=False)  # NB: blank=True for teams
    email = EmailField(_('Email address'), blank=True)

    date_joined = DateTimeField(_('Date joined'), default=now).set_tags(viewable=False)
    is_active = BooleanField(_('Active?'), default=True,
                             # help_text=_('Designates whether this user should be treated as '
                             #             'active. Deselect this instead of deleting accounts.'
                             #            ), TODO
                            ).set_tags(viewable=False)

    is_staff = BooleanField(_('Is staff?'), default=False,
                            # help_text=_('Designates whether the user can log into this admin site.'), TODO
                           ).set_tags(viewable=False)
    is_superuser = BooleanField(_('Is a superuser?'), default=False,
                                # help_text=_('If True, can create groups & events.') TODO
                               ).set_tags(viewable=False)
    role = ForeignKey(UserRole, verbose_name=_('Role'), null=True,
                      on_delete=PROTECT,
                     ).set_tags(viewable=False)

    is_team = BooleanField(verbose_name=_('Is a team?'), default=False).set_tags(viewable=False)
    teammates_set = ManyToManyField('self', verbose_name=_('Teammates'),
                                    symmetrical=False, related_name='teams_set',
                                   ).set_tags(viewable=False)

    time_zone = CharField(_('Time zone'), max_length=50,
                          default=settings.TIME_ZONE,
                          choices=[(tz, tz) for tz in pytz.common_timezones],
                         ).set_tags(viewable=False)
    theme = CharField(_('Theme'), max_length=50,
                      default=settings.THEMES[0][0],
                      choices=settings.THEMES,
                     ).set_tags(viewable=False)

    # NB: do not use directly ; use the property 'settings'
    json_settings = TextField(editable=False, default='{}').set_tags(viewable=False)  # TODO: JSONField ?

    objects = CremeUserManager()

    USERNAME_FIELD = 'username'
    REQUIRED_FIELDS = ['first_name', 'last_name', 'email']

    creation_label = _('Create a user')
    save_label     = _('Save the user')

    # Per-instance caches (see the 'settings', 'teams' & 'teammates' properties).
    _settings = None
    _teams = None
    _teammates = None

    class Meta:
        # abstract = True TODO: class AbstractCremeUser ?
        ordering = ('username',)
        verbose_name = _('User')
        verbose_name_plural = _('Users')
        app_label = 'creme_core'

    def __str__(self):
        return self.get_full_name()

    def get_full_name(self):
        """Display name: '<username> (team)' for teams, else
        '<first_name> <initial of last_name>.' (or the username as fallback)."""
        if self.is_team:
            return ugettext('{user} (team)').format(user=self.username)

        # TODO: we could also check related contact to find first_name, last_name
        first_name = self.first_name
        last_name = self.last_name

        if first_name and last_name:
            return ugettext('{first_name} {last_name}.').format(
                        first_name=first_name,
                        last_name=last_name[0],
            )
        else:
            return self.username

    def get_short_name(self):
        return self.username

    # TODO: def clean() ?? (team + role= None etc...)

    # TODO find where forms are imported, making that method called BEFORE User has been contributed
    # @staticmethod
    # def get_common_ones():
    #     return User.objects.filter(is_staff=False)

    @property
    def settings(self):
        """Lazily-built, cached per-user settings manager backed by json_settings."""
        settings = self._settings

        if settings is None:
            from ..core.setting_key import UserSettingValueManager
            settings = self._settings = UserSettingValueManager(user_class=self.__class__,
                                                                user_id=self.id,
                                                                json_settings=self.json_settings,
                                                               )

        return settings

    @property
    def theme_info(self):
        """The (name, label) pair from settings.THEMES matching self.theme,
        falling back to the first configured theme."""
        THEMES = settings.THEMES
        theme_name = self.theme

        for theme_info in settings.THEMES:
            if theme_name == theme_info[0]:
                return theme_info

        return THEMES[0]

    @property  # NB notice that a cache is built
    def teams(self):
        """Cached list of the teams this (non-team) user belongs to."""
        assert not self.is_team

        teams = self._teams
        if teams is None:
            self._teams = teams = list(self.teams_set.all())

        return teams

    @property  # NB notice that cache and credentials are well updated when using this property
    def teammates(self):
        """Dictionary of teammates users
        key: user ID.
        value CremeUser instance.
        """
        assert self.is_team

        teammates = self._teammates

        if teammates is None:
            logger.debug('User.teammates: Cache MISS for user_id=%s', self.id)
            self._teammates = teammates = self.teammates_set.in_bulk()
        else:
            logger.debug('User.teammates: Cache HIT for user_id=%s', self.id)

        return teammates

    @teammates.setter
    def teammates(self, users):
        """Replace the team's members (only valid on a team, with non-team users)."""
        assert self.is_team
        assert not any(user.is_team for user in users)

        self.teammates_set.set(users)
        self._teammates = None  # Clear cache (we could rebuild it but ...)

    def _get_credentials(self, entity):
        """Return (and cache on the entity) the EntityCredentials linking
        this user to `entity`. The cache lives in entity._credentials_map,
        keyed by user id."""
        creds_map = getattr(entity, '_credentials_map', None)

        if creds_map is None:
            entity._credentials_map = creds_map = {}
            creds = None
        else:
            creds = creds_map.get(self.id)

        if creds is None:
            logger.debug('CremeUser._get_credentials(): Cache MISS for id=%s user=%s',
                         entity.id, self)
            creds_map[self.id] = creds = EntityCredentials(self, entity)
        else:
            logger.debug('CremeUser._get_credentials(): Cache HIT for id=%s user=%s',
                         entity.id, self)

        return creds

    # Copied from auth.models.PermissionsMixin.has_perm
    def has_perm(self, perm, obj=None):
        """
        Returns True if the user has the specified permission. This method
        queries all available auth backends, but returns immediately if any
        backend returns True. Thus, a user who has permission from a single
        auth backend is assumed to have permission in general. If an object is
        provided, permissions for this specific object are checked.
        """
        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        # Check the backends.
        return _user_has_perm(self, perm, obj)

    def has_perms(self, perm_list, obj=None):
        has_perm = self.has_perm

        return all(has_perm(perm, obj) for perm in perm_list)

    def has_perm_to_access(self, app_name):  # TODO: rename "app_label"
        return self.is_superuser or self.role.is_app_allowed_or_administrable(app_name)

    @staticmethod  # TODO: move in utils ?
    def _get_app_verbose_name(app_label):
        try:
            return apps.get_app_config(app_label).verbose_name
        except LookupError:
            return ugettext('Invalid app "{}"').format(app_label)

    def has_perm_to_access_or_die(self, app_label):
        if not self.has_perm_to_access(app_label):
            raise PermissionDenied(ugettext('You are not allowed to access to the app: {}').format(
                                        self._get_app_verbose_name(app_label)
                                   )
                                  )

    def has_perm_to_admin(self, app_name):  # TODO: rename "app_label"
        return self.is_superuser or self.role.is_app_administrable(app_name)

    def has_perm_to_admin_or_die(self, app_name):  # TODO: rename 'app_label'
        if not self.has_perm_to_admin(app_name):
            raise PermissionDenied(ugettext('You are not allowed to configure this app: {}').format(
                                        self._get_app_verbose_name(app_name)
                                   )
                                  )

    def has_perm_to_change(self, entity):
        # Deleted entities can never be edited.
        if entity.is_deleted:
            return False

        # Auxiliary entities are checked through their owning (related) entity.
        main_entity = entity.get_real_entity().get_related_entity() \
                      if hasattr(entity.entity_type.model_class(), 'get_related_entity') \
                      else entity

        return self._get_credentials(main_entity).can_change()

    def has_perm_to_change_or_die(self, entity):
        if not self.has_perm_to_change(entity):
            raise PermissionDenied(ugettext('You are not allowed to edit this entity: {}').format(
                                        entity.allowed_str(self)
                                   )
                                  )

    def has_perm_to_create(self, model_or_entity):
        """Helper for has_perm() method.
        eg: user.has_perm('myapp.add_mymodel') => user.has_perm_to_create(MyModel)
        """
        meta = model_or_entity._meta
        return self.has_perm('{}.add_{}'.format(meta.app_label, meta.object_name.lower()))

    def has_perm_to_create_or_die(self, model_or_entity):
        if not self.has_perm_to_create(model_or_entity):
            raise PermissionDenied(ugettext('You are not allowed to create: {}').format(
                                        model_or_entity._meta.verbose_name
                                   )
                                  )

    def has_perm_to_delete(self, entity):
        # NB: deleting an auxiliary entity requires *change* permission on its
        # related entity, not delete permission.
        if hasattr(entity.entity_type.model_class(), 'get_related_entity'):  # TODO: factorise
            return self._get_credentials(entity.get_real_entity()
                                               .get_related_entity()
                                        ).can_change()

        return self._get_credentials(entity).can_delete()

    def has_perm_to_delete_or_die(self, entity):
        if not self.has_perm_to_delete(entity):
            raise PermissionDenied(ugettext('You are not allowed to delete this entity: {}').format(
                                        entity.allowed_str(self)
                                   )
                                  )

    def has_perm_to_export(self, model_or_entity):  # TODO: factorise with has_perm_to_create() ??
        """Helper for has_perm() method.
        eg: user.has_perm('myapp.export_mymodel') => user.has_perm_to_export(MyModel)
        """
        meta = model_or_entity._meta
        return self.has_perm('{}.export_{}'.format(meta.app_label, meta.object_name.lower()))

    def has_perm_to_export_or_die(self, model_or_entity):
        if not self.has_perm_to_export(model_or_entity):
            raise PermissionDenied(ugettext('You are not allowed to export: {}').format(
                                        model_or_entity._meta.verbose_name
                                   )
                                  )

    def has_perm_to_link(self, entity_or_model, owner=None):
        """Can the user link a future entity of a given class ?
        @param entity_or_model: {Instance of} class inheriting CremeEntity.
        @param owner: (only used when 1rst param is a class) Instance of auth.User ;
                      owner of the (future) entity. 'None' means: is there an
                      owner (at least) that allows linking.
        """
        assert not self.is_team  # Teams can not be logged, it has no sense

        from .entity import CremeEntity

        if isinstance(entity_or_model, CremeEntity):
            # TODO: what about related_entity ?
            return False if entity_or_model.is_deleted else \
                   self._get_credentials(entity_or_model).can_link()

        assert issubclass(entity_or_model, CremeEntity)
        return True if self.is_superuser else \
               self.role.can_do_on_model(self, entity_or_model, owner, EntityCredentials.LINK)

    def has_perm_to_link_or_die(self, entity_or_model, owner=None):  # TODO: factorise ??
        from .entity import CremeEntity

        if not self.has_perm_to_link(entity_or_model, owner):
            if isinstance(entity_or_model, CremeEntity):
                msg = ugettext('You are not allowed to link this entity: {}').format(
                            entity_or_model.allowed_str(self)
                      )
            else:
                msg = ugettext('You are not allowed to link: {}').format(
                            entity_or_model._meta.verbose_name
                      )

            raise PermissionDenied(msg)

    def has_perm_to_unlink(self, entity):
        # TODO: what about related_entity ?
        return self._get_credentials(entity).can_unlink()

    def has_perm_to_unlink_or_die(self, entity):
        if not self.has_perm_to_unlink(entity):
            raise PermissionDenied(ugettext('You are not allowed to unlink this entity: {}').format(
                                        entity.allowed_str(self)
                                   )
                                  )

    def has_perm_to_view(self, entity):
        # TODO: what about related_entity ?
        return self._get_credentials(entity).can_view()

    def has_perm_to_view_or_die(self, entity):
        if not self.has_perm_to_view(entity):
            raise PermissionDenied(ugettext('You are not allowed to view this entity: {}').format(
                                        entity.allowed_str(self)
                                   )
                                  )
class AbstractActivity(CremeEntity):
    """Activity : task, meeting, phone call, unavailability ..."""
    title = CharField(_('Title'), max_length=100)

    # Start/end are nullable: "floating" activities have no fixed time slot.
    start = DateTimeField(_('Start'), blank=True, null=True)
    end = DateTimeField(_('End'), blank=True, null=True)

    description = TextField(_('Description'), blank=True).set_tags(optional=True)
    minutes = TextField(_('Minutes'), blank=True)
    place = CharField(_('Activity place'), max_length=500, blank=True)\
                     .set_tags(optional=True)
    duration = PositiveIntegerField(_('Duration (in hour)'), blank=True, null=True)
    type = ForeignKey(
        ActivityType,
        verbose_name=_('Activity type'),
        on_delete=PROTECT,
    )
    sub_type = ForeignKey(
        ActivitySubType,
        verbose_name=_('Activity sub-type'),
        blank=True, null=True,
        on_delete=SET_NULL,
    )
    status = ForeignKey(
        Status,
        verbose_name=_('Status'),
        blank=True, null=True,
        on_delete=SET_NULL,
    )
    # Managed through the calendar views, never edited directly on a form.
    calendars = ManyToManyField(
        Calendar,
        verbose_name=_('Calendars'),
        blank=True, editable=False,
    )
    is_all_day = BooleanField(_('All day?'), blank=True, default=False)
    busy = BooleanField(_('Busy?'), default=False)
    # TODO: use choices ; to be improved with choices: list-view search/field printers/history
    floating_type = PositiveIntegerField(
        _('Floating type'), default=NARROW, editable=False,
    ).set_tags(viewable=False)

    creation_label = _('Create an activity')
    save_label = _('Save the activity')

    class Meta:
        abstract = True
        manager_inheritance_from_future = True
        app_label = 'activities'
        verbose_name = _('Activity')
        verbose_name_plural = _('Activities')
        ordering = ('-start', )

    def as_ical_event(self):
        r"""Return a normalized iCalendar event string
        /!\ Each parameter has to be separated by \n ONLY no spaces allowed!
        Example : BEGIN:VEVENT\nUID:http://cremecrm.com
        """
        from ..utils import get_ical_date

        return """BEGIN:VEVENT
UID:http://cremecrm.com
DTSTAMP:{dtstamp}
SUMMARY:{summary}
DTSTART:{dtstart}
DTEND:{dtend}
LOCATION:{location}
CATEGORIES:{categories}
STATUS:{status}
END:VEVENT
""".format(
            dtstamp=get_ical_date(now()),
            summary=self.title,
            dtstart=get_ical_date(self.start),
            dtend=get_ical_date(self.end),
            location='',
            categories=self.type.name,
            status='',
        )

    def get_title_for_calendar(self):
        return self.title

    @classmethod
    def get_creation_title(cls, type_id):
        # Type-specific creation label, falling back to the generic one.
        return CREATION_LABELS.get(type_id, cls.creation_label)

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        return reverse('activities__view_activity', args=(self.id, ))

    @staticmethod
    def get_create_absolute_url():
        return reverse('activities__create_activity')

    def get_edit_absolute_url(self):
        return reverse('activities__edit_activity', args=(self.id, ))

    @staticmethod
    def get_lv_absolute_url():
        return reverse('activities__list_activities')

    def get_participant_relations(self):
        return self.get_relations(REL_OBJ_PART_2_ACTIVITY, real_obj_entities=True)

    def get_subject_relations(self, real_entities=True):
        """Get the list of models.Relation instances which link the Activity
        with its subjects.

        @param real_entities Retrieve (efficiently) the real entities which are related.
        """
        return self.get_relations(REL_OBJ_ACTIVITY_SUBJECT, real_obj_entities=real_entities)

    def get_linkedto_relations(self):
        return self.get_relations(REL_OBJ_LINKED_2_ACTIVITY, real_obj_entities=True)

    # TODO: move to manager the following methods
    # TODO: test
    @classmethod
    def _get_linked_aux(cls, entity):
        # Activities linked to <entity> through any of the 3 relation types.
        types = (REL_OBJ_PART_2_ACTIVITY, REL_OBJ_ACTIVITY_SUBJECT, REL_OBJ_LINKED_2_ACTIVITY)

        return cls.objects.filter(is_deleted=False,
                                  relations__object_entity=entity,
                                  relations__type__in=types,
                                 ) \
                          .distinct()

    @classmethod
    def _get_linked_for_ctypes_aux(cls, ct_ids):
        warnings.warn(
            'AbstractActivity._get_linked_for_ctypes_aux() is deprecated.',
            DeprecationWarning)

        types = (REL_OBJ_PART_2_ACTIVITY, REL_OBJ_ACTIVITY_SUBJECT, REL_OBJ_LINKED_2_ACTIVITY)

        return cls.objects.filter(is_deleted=False,
                                  relations__object_entity__entity_type__in=ct_ids,
                                  relations__type__in=types,
                                 ) \
                          .distinct()

    # TODO: test
    @classmethod
    def _get_linked_for_orga(cls, orga):
        # Like _get_linked_aux() but also includes activities linked to the
        # organisation's managers & employees.
        types = (REL_OBJ_PART_2_ACTIVITY, REL_OBJ_ACTIVITY_SUBJECT, REL_OBJ_LINKED_2_ACTIVITY)
        entities = [orga]
        entities.extend(orga.get_managers().values_list('id', flat=True))
        entities.extend(orga.get_employees().values_list('id', flat=True))

        return cls.objects.filter(is_deleted=False,
                                  relations__object_entity__in=entities,
                                  relations__type__in=types,
                                 ) \
                          .distinct()

    @classmethod
    def get_future_linked(cls, entity, today):
        # TODO end greater than today or floating type equal to floating
        return cls._get_linked_aux(entity).filter(end__gt=today).order_by('start')

    @classmethod
    def get_future_linked_for_ctypes(cls, ct_ids, today):
        warnings.warn(
            'AbstractActivity.get_future_linked_for_ctypes() is deprecated.',
            DeprecationWarning)

        return cls._get_linked_for_ctypes_aux(ct_ids).filter(end__gt=today).order_by('start')

    @classmethod
    def get_future_linked_for_orga(cls, orga, today):
        return cls._get_linked_for_orga(orga).filter(end__gt=today).order_by('start')

    @classmethod
    def get_past_linked(cls, entity, today):
        return cls._get_linked_aux(entity).filter(end__lte=today).order_by('-start')

    @classmethod
    def get_past_linked_for_ctypes(cls, ct_ids, today):
        warnings.warn(
            'AbstractActivity.get_past_linked_for_ctypes() is deprecated.',
            DeprecationWarning)

        return cls._get_linked_for_ctypes_aux(ct_ids).filter(end__lte=today).order_by('-start')

    @classmethod
    def get_past_linked_for_orga(cls, orga, today):
        return cls._get_linked_for_orga(orga).filter(end__lte=today).order_by('-start')

    def handle_all_day(self):
        # Stretch an all-day activity over the whole calendar day.
        if self.is_all_day:
            self.start = self.start.replace(hour=0, minute=0)
            self.end = self.end.replace(hour=23, minute=59)

    def _pre_save_clone(self, source):
        # TODO: Explicit this into description ? Move the activity to another time-slot ?
        # A clone occupies the same slot as its source, so it cannot be "busy".
        if source.busy:
            self.busy = False

    # TODO: move to utils ?
    def is_auto_orga_subject_enabled(self):
        # NB: a per-instance cache of the SettingValue used to live here;
        #     get_4_key() (with its default) replaced it.
        return SettingValue.objects.get_4_key(auto_subjects_key, default=False).value

    @staticmethod
    def display_review():
        warnings.warn(
            'AbstractActivity.display_review() is deprecated ; '
            'use "SettingValue.objects.get_4_key(setting_keys.review_key).value" instead.',
            DeprecationWarning)

        from ..constants import SETTING_DISPLAY_REVIEW
        return SettingValue.objects.get(key_id=SETTING_DISPLAY_REVIEW).value

    def _copy_relations(self, source):
        # Only the "participates" internal relation type is copied on clone.
        super()._copy_relations(source, allowed_internal=[REL_OBJ_PART_2_ACTIVITY])

    def _pre_delete(self):
        # Remove participation relations without opening a new transaction.
        for relation in self.relations.filter(type=REL_OBJ_PART_2_ACTIVITY):
            relation._delete_without_transaction()
class Transaction(Model):
    """Transaction: Scorebot Score Base

    Defines a Base Python Class object for tracking and managing score
    types, results and values. Allows for tracking of the "score stack",
    which is a history of all Transactions for a Team over time.

    Subclasses Must Define:
        save()
        __json__()
        __score__()
        __string__()
    """

    class Meta:
        verbose_name = '[Score] Transaction'
        verbose_name_plural = '[Score] Transaction'

    value = IntegerField('Transaction Value', default=0)
    when = DateTimeField('Transaction Date/Time', auto_now_add=True)
    # Linked list of transactions: each one points at the one before it.
    previous = OneToOneField('self', null=True, blank=True, on_delete=SET_NULL)
    source = ForeignKey('scorebot_db.ScoreTeam', on_delete=CASCADE, related_name='score_source')
    destination = ForeignKey('scorebot_db.ScoreTeam', on_delete=CASCADE, related_name='score_destination')
    # Discriminator used by __subclass__() to reach the concrete subclass row.
    subclass = SmallIntegerField('Team SubClass', default=None, null=True, editable=False, choices=SCORE_SUBCLASS)

    def log(self):
        # Log the Score to a Flat File (Triggered on Saves).
        #
        # Columns
        # Value, Type, ISO When, Path From, Path To, Score
        # NOTE(review): get_score()/get_name() are not defined in this class;
        # presumably provided by the scorebot Model base — verify.
        Scoring.info('%d,%s,%s,%s,%s,%d' % (self.get_score(), self.get_name(),
                                            self.when.isoformat(),
                                            self.source.get_path(),
                                            self.destination.get_path(),
                                            self.destination.get_score()))

    def name(self):
        # Concrete subclass name (e.g. "Payment"), via the discriminator.
        return str(self.__subclass__().__class__.__name__)

    def json(self):
        return self.__subclass__().__json__()

    def score(self):
        return self.__subclass__().__score__()

    def stack(self):
        """Walk the 'previous' chain and return the full history + total."""
        total = 0
        score = self
        stack = list()
        while score is not None:
            stack.append(score.json())
            total += score.score()
            score = next(score)  # __next__() returns self.previous
        result = {'stack': stack, 'total': total}
        # NOTE(review): these dels are no-ops for the caller; locals die anyway.
        del stack
        del total
        return result

    def total(self):
        # NOTE(review): only adds the *immediate* previous score, not the whole
        # chain — if a full-chain total is intended this should call
        # self.previous.total(); confirm (stack() does sum the whole chain).
        if self.previous is not None:
            return self.score() + self.previous.score()
        return self.score()

    def reverse(self):
        """Create and save a mirror transaction that cancels this one."""
        transaction = new(self.name(), save=False)  # module-level factory
        transaction.when = self.when
        transaction.subclass = self.subclass
        transaction.value = self.score() * -1
        # Swap the endpoints so the score flows back.
        transaction.destination = self.source
        transaction.source = self.destination
        transaction.save()
        return transaction

    def __str__(self):
        return self.__subclass__().__string__()

    def __len__(self):
        return abs(self.score())

    def __next__(self):
        # Iteration steps back through history.
        return self.previous

    def __bool__(self):
        return self.score() > 0

    def __json__(self):
        return {
            'type': self.name(),
            'value': self.get_score(),
            'when': self.when.isoformat(),
            'source': self.source.name,
            'destination': self.destination.name
        }

    def __score__(self):
        return self.value

    def __string__(self):
        return '[Transaction] (%s) %d: %s -> %s' % (
            self.when.strftime('%m/%d/%y %H:%M'), self.value,
            self.source.path(), self.destination.path())

    def __subclass__(self):
        """Return the concrete subclass instance this row represents.

        Uses Django multi-table-inheritance accessors (self.payment, etc.)
        selected by the 'subclass' discriminator; falls back to self.
        """
        if self.subclass == SCORE_SUBCLASS_TRANSACTION or self.__class__.__name__ == self.get_subclass_display():
            return self
        if self.subclass == SCORE_SUBCLASS_PAYMENT:
            return self.payment
        if self.subclass == SCORE_SUBCLASS_TRANSFER:
            return self.transfer
        if self.subclass == SCORE_SUBCLASS_PURCHASE:
            return self.purchase
        if self.subclass == SCORE_SUBCLASS_CORRECTION:
            return self.correction
        if self.subclass == SCORE_SUBCLASS_PAYMENTHEALTH:
            return self.paymenthealth
        if self.subclass == SCORE_SUBCLASS_TRANSFERRESULT:
            return self.transferresult
        if self.subclass == SCORE_SUBCLASS_TRANSACTIONFLAG:
            return self.transactionflag
        if self.subclass == SCORE_SUBCLASS_TRANSACTIONBEACON:
            return self.transactionbeacon
        return self

    # Ordering/equality compare by effective score.
    # NOTE(review): defining __eq__ without __hash__ makes instances
    # unhashable in Python 3 (Model.__hash__ is shadowed to None) — verify
    # no code puts Transactions in sets/dict keys.
    def __lt__(self, other):
        return isinstance(other, Transaction) and other.score() > self.score()

    def __gt__(self, other):
        return isinstance(other, Transaction) and other.score() < self.score()

    def __eq__(self, other):
        return isinstance(other, Transaction) and other.score() == self.score()

    def save(self, *args, **kwargs):
        # Default the discriminator before the first save.
        if self.subclass is None:
            self.subclass = SCORE_SUBCLASS_TRANSACTION
        Model.save(self, *args, **kwargs)
def __init__(self, output_field=None, **extra): if output_field is None: output_field = DateTimeField() super(Now, self).__init__(output_field=output_field, **extra)
def changelist_view(self, request, extra_context=None):
    """Render the ticket change list with aggregated sales data for charts.

    Adds to the template context:
      * flight_summary / summary_total: per-flight and overall totals,
      * label_list / val_list / tic_list: revenue & ticket-count series for
        the current date-hierarchy period ("month" or "day"),
      * week_label_list / week_val_list / week_tic_list: weekly series
        (only built in the "month" branch, as before).
    Non-superusers only see tickets of their own avio company.

    Fixes vs. previous version: the 'Novemer' month label typo, and an
    IndexError when strftime('%U') returned week 52/53 (the lists only had
    52 slots) — out-of-range buckets are now skipped.
    """
    response = super().changelist_view(request, extra_context=extra_context)

    try:
        qs = response.context_data['cl'].queryset
    except (AttributeError, KeyError):
        # Change list failed to build (e.g. bad lookup) -> nothing to add.
        return response

    metrics = {
        'total': Count('id'),
        'total_sales': Sum('price'),
    }

    if not request.user.is_superuser:
        qs = qs.filter(flight__avio_company=request.user.adminuser.avio_admin)

    row = list(qs.values('flight').annotate(**metrics).order_by('flight'))
    flights = qs.values('flight').annotate(**metrics).order_by('flight').values('flight')
    flights = Flight.objects.filter(pk__in=flights)
    response.context_data['flight_summary'] = zip(flights, row)
    response.context_data['summary_total'] = dict(qs.aggregate(**metrics))

    period = get_next_in_date_hierarchy(request, self.date_hierarchy)
    response.context_data['period'] = period

    summary_over_time = self._period_series(qs, period, Sum('price'))
    tickets_sold = self._period_series(qs, period, Count('id'))

    if period == "month":
        response.context_data['label_list'] = json.dumps([
            'January', 'February', 'March', 'April', 'May', 'June', 'July',
            'August', 'September', 'October', 'November', 'December'
        ])
        response.context_data['val_list'] = json.dumps(
            self._fill_buckets(summary_over_time, 12, '%m', offset=1))
        response.context_data['tic_list'] = json.dumps(
            self._fill_buckets(tickets_sold, 12, '%m', offset=1))

        # The month view additionally exposes per-week series.
        response.context_data['week'] = True
        response.context_data['week_label_list'] = json.dumps(
            [str(x) + "week" for x in range(1, 53)])
        response.context_data['week_val_list'] = json.dumps(
            self._fill_buckets(self._period_series(qs, 'week', Sum('price')), 52, '%U'))
        response.context_data['week_tic_list'] = json.dumps(
            self._fill_buckets(self._period_series(qs, 'week', Count('id')), 52, '%U'))
    elif period == "day":
        response.context_data['week'] = False
        response.context_data['label_list'] = json.dumps(
            [str(x) for x in range(1, 32)])
        response.context_data['val_list'] = json.dumps(
            self._fill_buckets(summary_over_time, 31, '%d', offset=1))
        response.context_data['tic_list'] = json.dumps(
            self._fill_buckets(tickets_sold, 31, '%d', offset=1))

    return response

def _period_series(self, qs, trunc_kind, aggregate):
    # One aggregated value per period, with 'time' truncated to trunc_kind
    # ('month' / 'day' / 'week').
    return qs.annotate(
        period=Trunc('time', trunc_kind, output_field=DateTimeField()),
    ).values('period').annotate(total=aggregate).order_by('period')

def _fill_buckets(self, rows, size, fmt, offset=0):
    # Spread per-period totals into a fixed-size list, indexed by the
    # strftime code `fmt` minus `offset`. Out-of-range indices (e.g. %U
    # weeks 52/53, which previously raised IndexError) are skipped.
    # TODO: expose the dropped trailing weeks properly (extend the chart).
    values = [0] * size
    for entry in rows:
        index = int(entry['period'].strftime(fmt)) - offset
        if 0 <= index < size:
            values[index] = entry['total']
    return values
class Bounty(Model):
    """A crowd-funded bounty: users fund it, a successful claim gets paid out."""

    # FIELDS
    title = CharField(max_length=100)
    description = TextField()
    deadline = DateField()
    state = CharField(max_length=64, choices=STATE_CHOICES, default='PENDING')
    private = BooleanField(default=True)
    # Very wide decimals to cope with arbitrary crypto-asset precision.
    target_reward = DecimalField(max_digits=512, decimal_places=256)
    fees = DecimalField(max_digits=512, decimal_places=256)  # fee ratio, not an amount
    asset = CharField(max_length=100)  # asset key resolved via asset_control

    # MANY TO MANY
    tags = ManyToManyField('tags.Tag', related_name="bounties")
    keywords = ManyToManyField('search.Keyword', related_name="bounties")
    # NOTE(review): null=True has no effect on ManyToManyField (Django ignores it).
    comments = ManyToManyField(
        'comment.Comment', related_name="bounty_comments", null=True, blank=True
    )

    # METADATA
    created_on = DateTimeField(auto_now_add=True)
    created_by = ForeignKey('auth.User', related_name="bounties_created")
    updated_on = DateTimeField(auto_now=True)
    updated_by = ForeignKey("auth.User", related_name="bounties_updated")
    cancelled_on = DateTimeField(null=True, blank=True)
    cancelled_by = ForeignKey(
        "auth.User", related_name="bounties_cancelled", null=True, blank=True
    )

    # CASHED FOR VIEWS ONLY!!!  (denormalized snapshots, not authoritative)
    cashed_claim_count = IntegerField(default=0)
    cashed_reward = DecimalField(
        max_digits=512, decimal_places=256, default=Decimal("0.0")
    )

    @property
    def fraction_reward(self):
        # Share of each received unit that goes to the reward (rest is fees).
        return (Decimal("1.0") / (Decimal("1.0") + self.fees))

    @property
    def fraction_fees(self):
        return Decimal("1.0") - self.fraction_reward

    @property
    def received(self):
        """ Total funds received. """
        total = Decimal("0.0")
        for userfund in self.userfunds.all():
            total = total + userfund.received
        return total

    @property
    def target_funds(self):
        # Gross amount needed so that the net (after fees) hits target_reward.
        amount = (self.target_reward / self.fraction_reward)
        return asset_control.get_manager(self.asset).quantize(amount)

    @property
    def funds_needed(self):
        needed = self.target_funds - self.received
        if needed < Decimal("0.0"):  # over-funded -> nothing more needed
            return Decimal("0.0")
        return needed

    @property
    def funded_ratio(self):
        if not self.target_funds:  # avoid division by zero
            return Decimal("1")
        return self.received / self.target_funds

    @property
    def slug(self):
        return uslugify(self.title)

    @property
    def public(self):
        return not self.private

    @property
    def awarded(self):
        # The single successful claim, or None.
        from apps.claim.models import Claim  # prevent circular import ...
        return get_object_or_none(Claim, bounty=self, successful=True)

    @property
    def display_fees(self):
        # Fees to show: derived from the payout when awarded, otherwise from
        # whichever of target/received is the display basis.
        if self.awarded and self.awarded.payout:
            received = (self.awarded.payout.amount / self.fraction_reward)
        else:
            usetarget = self.state == "PENDING" and self.target_funds > self.received
            # NOTE(review): 'a and b or c' picks c when b is falsy; safe only
            # because usetarget implies target_funds > received >= 0.
            received = usetarget and self.target_funds or self.received
        amount = received * self.fraction_fees
        return asset_control.get_manager(self.asset).quantize(amount)

    @property
    def reward(self):
        if self.awarded and self.awarded.payout:
            return self.awarded.payout.amount
        am = asset_control.get_manager(self.asset)
        return self.received - am.quantize(self.received * self.fraction_fees)

    @property
    def display_reward(self):
        if self.awarded and self.awarded.payout:
            return self.awarded.payout.amount
        usetarget = self.state == "PENDING" and self.target_funds > self.received
        received = usetarget and self.target_funds or self.received
        reward = received - self.display_fees
        # Never display less than the cached snapshot.
        if self.cashed_reward > reward:
            return self.cashed_reward
        return reward

    @property
    def display_send_transactions(self):
        txlist = []
        for userfund in self.userfunds.all():
            txlist = txlist + userfund.display_send_transactions
        if self.awarded and self.awarded.payout:
            payment = self.awarded.payout
            tx = payment.transaction
            tx["user"] = self.awarded.user  # add user for use in templates
            tx["type"] = _("PAYOUT")  # add type for use in templates
            txlist.append(tx)
        return sorted(txlist, key=lambda tx: tx["timereceived"], reverse=True)

    @property
    def display_receive_transactions(self):
        txlist = []
        for userfund in self.userfunds.all():
            txlist = txlist + userfund.display_receive_transactions
        return sorted(txlist, key=lambda tx: tx["timereceived"], reverse=True)

    # URL helpers used by templates.
    @property
    def url_funds(self):
        return "/bounty/%s/funds/%s" % (self.id, self.slug)

    @property
    def url_details(self):
        return "/bounty/%s/details/%s" % (self.id, self.slug)

    @property
    def url_claims(self):
        return "/bounty/%s/claims/%s" % (self.id, self.slug)

    @property
    def url_comments(self):
        return "/bounty/%s/comments/%s" % (self.id, self.slug)

    @property
    def url_makepublic(self):
        return "/bounty/%s/makepublic/%s" % (self.id, self.slug)

    @property
    def url_cancel(self):
        return "/bounty/%s/cancel/%s" % (self.id, self.slug)

    @property
    def url_edit(self):
        return "/bounty/%s/edit/%s" % (self.id, self.slug)

    @property
    def url_delete(self):
        return "/bounty/%s/delete/%s" % (self.id, self.slug)

    @property
    def url_declare_unresolved(self):
        return "/bounty/%s/declare/unresolved" % self.id

    @property
    def url_setrefund(self):
        return "/userfund/setrefund/%s" % self.id

    @property
    def url_claim(self):
        return "/claim/create/%s" % self.id

    # NOTE(review): Python-2 style; __str__ is what Python 3 uses.
    def __unicode__(self):
        from apps.asset.templatetags.asset_tags import render_asset
        return "%i: %s - %s - Reward: %s - Deadline: %s - Created: %s" % (
            self.id, self.title, self.state,
            render_asset(self.display_reward, self.asset),
            self.deadline, self.created_on
        )
def changelist_view(self, request, extra_context=None): response = super(ClosingDistributionAdmin, self).changelist_view( request, extra_context=extra_context, ) try: qs = response.context_data['cl'].queryset.filter(closed=True) except (AttributeError, KeyError): return response metrics = { 'total': Count('id'), 'total_sale_price': Sum( F('ri_proceeds') + F('city_proceeds') + F('city_loan_proceeds')), 'total_city_proceeds': Sum('city_proceeds'), 'total_city_loan_proceeds': Sum('city_loan_proceeds'), 'total_ri_proceeds': Sum('ri_proceeds'), } response.context_data['summary'] = list( qs.values('application__application_type').annotate( **metrics).order_by('-application__application_type')) response.context_data['summary_total'] = dict(qs.aggregate(**metrics)) period = get_next_in_date_hierarchy( request, self.date_hierarchy, ) response.context_data['scale'] = period summary_over_time = qs.annotate(period=Trunc( 'date_time', period, output_field=DateTimeField(), ), ).values('period').annotate(total=Sum( F('ri_proceeds') + F('city_proceeds') + F('city_loan_proceeds')), count=Count('id')).order_by('period') summary_range = summary_over_time.aggregate( low=Min('count'), high=Max('count'), ) high = summary_range.get('high', 0) low = summary_range.get('low', 0) response.context_data['summary_over_time'] = [{ 'period': x['period'], 'total': x['total'] or 0, 'count': x['count'], 'high': high, 'low': low, 'pct': \ float( float(x['count'] - 0) / float(high-0) ) * 100,# if x['count'] != low else 1, } for x in summary_over_time] return response
class Comment(Model): time = DateTimeField(auto_created=True, auto_now_add=True) drawing = ForeignKey('Drawing', on_delete=CASCADE) author = ForeignKey('Author', on_delete=SET_NULL, null=True) comment = CharField(max_length=240)
class Journal(models.Model): key_out_date = DateTimeField(null=True) key_in_date = DateTimeField(auto_now_add=True) tenant = ForeignKey(Tenant, on_delete=DO_NOTHING) key = ForeignKey(Key, on_delete=DO_NOTHING)
# NOTE(review): mutable default `validated={}` is shared across calls; it is
# presumably always supplied by a validation decorator — confirm, else change
# to None + fallback.
def punishment(request, u=None, validated={}, *args, **kwargs):
    """List (GET) or create (PUT) punishments for user id `u`.

    Returns (payload, http_status) tuples consumed by the surrounding
    API layer.
    """
    try:
        user = User.objects.get(id=u)
    except Exception as e:
        return 'non existent user queried - {}'.format(e), 403

    # House-keeping: auto-resolve every punishment whose fixed length has
    # elapsed (created_at + length <= now).
    Punishment.objects\
        .annotate(completion=ExpressionWrapper(F('created_at') + F('length'),
                                               output_field=DateTimeField()))\
        .filter(completion__lte=timezone.now(), resolved=False, length__isnull=False).update(resolved=True)

    if request.method == 'GET':
        punishments = Punishment.objects.filter(user=user)

        # Each filter is optional: None means "don't filter on this".
        if validated['server'] is not None:
            server = Server.objects.get(id=validated['server'])
            # Include global punishments (server=None) as well.
            punishments = punishments.filter(Q(server=server) | Q(server=None))
        if validated['resolved'] is not None:
            punishments = punishments.filter(resolved=validated['resolved'])
        if validated['muted'] is not None:
            punishments = punishments.filter(is_muted=validated['muted'])
        if validated['banned'] is not None:
            punishments = punishments.filter(is_banned=validated['banned'])
        if validated['gagged'] is not None:
            punishments = punishments.filter(is_gagged=validated['gagged'])
        if validated['kicked'] is not None:
            punishments = punishments.filter(is_kicked=validated['kicked'])

        return [p for p in punishments.annotate(admin=F('created_by__namespace'))
                                      .values('id', 'user', 'server', 'created_at', 'reason',
                                              'resolved', 'created_by', 'length', 'is_banned',
                                              'is_kicked', 'is_muted', 'is_gagged', 'admin')], 200
    elif request.method == 'PUT':
        if 'server' in validated:
            server = Server.objects.get(id=validated['server'])
        else:
            server = None  # global punishment

        # length <= 0 means a permanent punishment (no auto-resolve).
        if validated['length'] > 0:
            length = datetime.timedelta(seconds=validated['length'])
        else:
            length = None

        punishment = Punishment(user=user, server=server, reason=validated['reason'],
                                is_muted=validated['muted'], is_gagged=validated['gagged'],
                                is_kicked=validated['kicked'], is_banned=validated['banned'],
                                length=length, created_by=request.user)
        punishment.save()

        # Push the punishment out to the game server(s) via sourcemod.
        if validated['plugin']:
            # Either the single targeted server, or every server when global.
            server = [server] if server else Server.objects.all()
            for s in server:
                if punishment.is_gagged or punishment.is_muted:
                    SourcemodPluginWrapper(s).mutegag(punishment)
                if punishment.is_banned:
                    SourcemodPluginWrapper(s).ban(punishment)
                if punishment.is_kicked:
                    # A kick is instantaneous, so it is resolved immediately.
                    punishment.resolved = True
                    punishment.save()
                    SourcemodPluginWrapper(s).kick(punishment)
        # NOTE(review): the PUT branch falls through without an explicit
        # return (implicitly None) — verify the API layer treats that as
        # success, or add an explicit (payload, status) return.
class UploadedImages(Model):
    """Before/after picture pair shown on the our-work page.

    On save, each picture is rotated and recompressed (via a binary search
    on JPEG quality) towards its configured target file size and maximum
    dimension.
    """

    # Fix pluralization in admin panel
    class Meta:
        verbose_name_plural = "Uploaded Images"

    # Define image categories to be displayed under in ~/templates/our-work.html
    CATEGORIES = (
        ('No_Category', 'Select a Category'),
        ('House_Wash', 'House Wash'),
        ('Wood_Restoring', 'Wood Restoring'),
        ('Oxidation_Removal', 'Oxidation Removal'),
        ('Stain_Removal', 'Stain Removal'),
    )
    # Display label is the user's clockwise wish; the stored value is the
    # counter-clockwise PIL rotation applied in resize_image().
    DEGREES = (
        (0, '0 degrees'),
        (270, '90 degrees (90 degrees clockwise)'),
        (180, '180 degrees (upside-down)'),
        (90, '270 degrees (90 degrees counter-clockwise)'),
    )

    # Define the user image input fields in the Django admin panel
    Category = CharField(max_length=64, null=True, choices=CATEGORIES, default='No_Category')
    Before_Picture_Description = CharField(max_length=64, null=True, blank=True)
    Before_Picture_Size_kB = IntegerField(null=True, default=140)        # target size (kB)
    Before_Picture_Max_Dimension = IntegerField(null=True, default=768)  # max px on a side
    Before_Picture_Rotation = IntegerField(null=True, choices=DEGREES, default=0)
    Before_Picture = ImageField(upload_to='images/', null=True)
    After_Picture_Description = CharField(max_length=64, null=True, blank=True)
    After_Picture_Size_kB = IntegerField(null=True, default=140)
    After_Picture_Max_Dimension = IntegerField(null=True, default=768)
    After_Picture_Rotation = IntegerField(null=True, choices=DEGREES, default=0)
    After_Picture = ImageField(upload_to='images/', null=True)
    date = DateTimeField(auto_now_add=True, null=True)
    Notes = TextField(max_length=200, null=True, blank=True)

    # Add some extra functionality to the default behavior of the *.save() method
    # via the *.super() method
    def save(self, *args, **kwargs):
        # NOTE(review): After_Picture is only processed when Before_Picture is
        # set — an "after"-only upload is saved unresized; confirm intended.
        if self.Before_Picture:
            # Note: this will overwrite the image uploaded by the user
            self.Before_Picture = self.resize_image(
                self.Before_Picture, self.Before_Picture_Size_kB,
                self.Before_Picture_Max_Dimension, self.Before_Picture_Rotation)
            self.After_Picture = self.resize_image(
                self.After_Picture, self.After_Picture_Size_kB,
                self.After_Picture_Max_Dimension, self.After_Picture_Rotation)
        super(UploadedImages, self).save(*args, **kwargs)

    # Resize user-uploaded images
    # https://stackoverflow.com/questions/3723220/how-do-you-convert-a-pil-image-to-a-django-file
    def resize_image(self, picture, size_target, max_dim, rotation):
        """Rotate & recompress `picture` towards `size_target` kB, return an
        uploaded-file object (or the untouched picture on failure)."""
        # Set variables for the *.binary_search() method
        size_target = size_target * 1000   # Ideal image size (in bytes)
        dimensions = [(max_dim, max_dim)]  # Dimensions for *.thumbnail()
        dimension_factor = 1               # For generating 1x, 2x (retina), or higher res.
        i = 1                              # Iteration starting point
        max_i = 7                          # Max number of iterations
        quality = 50                       # Starting quality value
        L = 1                              # Left pointer
        R = 100                            # Right pointer

        # Run the binary search algorithm once for each set of dimensions you want to
        # create images at, ie. 320, 576, 768, etc. Currently there is no implementation
        # on the front-end to support more than one set of dimensions, but the loop is
        # kept as the starting point for responsive-image support later.
        for dimension in dimensions:
            im_buffer = self.binary_search(picture, size_target, dimension,
                                           dimension_factor, rotation, i,
                                           max_i, quality, L, R)

        # Wrap the in-memory BytesIO result back into a Django upload object.
        # picture.name: *.name is a Django File object attribute that includes the
        # name of the file plus its relative path from MEDIA_ROOT
        #
        # Syntax:
        # InMemoryUploadedFile(file, field_name, name, content_type, size, charset)
        if im_buffer is not None:
            im_resized_file = InMemoryUploadedFile(
                im_buffer, None, picture.name, 'image/jpeg',
                im_buffer.getbuffer().nbytes, None)
            return im_resized_file
        else:
            print("{} was not altered".format(picture))
            return picture

    # Binary search algorithm that uses 3 pointers -- L, R, and quality, where the
    # value for quality is used by PIL's *.save() method to set the quality of an
    # image -- in an attempt to find a quality that produces an image with a file
    # size as close to size_target as max_i iterations allow.
    def binary_search(self, picture, size_target, dimension, dimension_factor,
                      rotation, i, max_i, quality, L, R, im_buffer=None):
        # It's possible that the picture file size is already less than the target
        # file size, but we can still rotate the image here.
        if picture.size < size_target:
            print("{} is already less than {} bytes".format(
                picture, size_target))
            im = Image.open(picture)
            if rotation == 90:
                im = im.transpose(Image.ROTATE_90)
            elif rotation == 180:
                im = im.transpose(Image.ROTATE_180)
            elif rotation == 270:
                im = im.transpose(Image.ROTATE_270)
            im_buffer = BytesIO()
            im.save(im_buffer, "JPEG", quality=quality)
            return im_buffer

        # If the maximum number of iterations have been reached, return
        if i > max_i:
            print("Max iterations have been reached for {}".format(picture))
            return im_buffer

        # Open the image file, alter its dimensions, and save it as a new BytesIO
        # file named 'im_buffer'. Quality above 95 is pointless for JPEG, so stop.
        if quality <= 95:
            im = Image.open(picture)
            if rotation == 90:
                im = im.transpose(Image.ROTATE_90)
            elif rotation == 180:
                im = im.transpose(Image.ROTATE_180)
            elif rotation == 270:
                im = im.transpose(Image.ROTATE_270)
            new_dimension = (dimension[0] * dimension_factor,
                             dimension[1] * dimension_factor)
            # NOTE(review): Image.ANTIALIAS is deprecated (removed in Pillow 10;
            # Image.LANCZOS is the replacement) — verify the pinned Pillow version.
            im.thumbnail(new_dimension, Image.ANTIALIAS)
            im_buffer = BytesIO()
            im.save(im_buffer, "JPEG", quality=quality)

            # Use L and R pointers to move closer to a value for the 'quality'
            # parameter that produces a file size as close to size_target as
            # possible, binary-search style.
            if im_buffer.getbuffer().nbytes < size_target:
                print(
                    'Resulting image size is LESS than {} bytes:'.format(
                        size_target), im_buffer.getbuffer().nbytes,
                    'bytes, quality =', quality)
                L = quality
                quality = int((R + L) / 2)
                return self.binary_search(picture, size_target, dimension,
                                          dimension_factor, rotation, i + 1,
                                          max_i, quality, L, R, im_buffer)
            elif im_buffer.getbuffer().nbytes > size_target:
                print(
                    'Resulting image size is GREATER than {} bytes:'.format(
                        size_target), im_buffer.getbuffer().nbytes,
                    'bytes, quality =', quality)
                R = quality
                quality = int((R + L) / 2)
                return self.binary_search(picture, size_target, dimension,
                                          dimension_factor, rotation, i + 1,
                                          max_i, quality, L, R, im_buffer)
            else:
                print(
                    'Resulting image size EQUALS {} bytes:'.format(
                        size_target), im_buffer.getbuffer().nbytes,
                    'bytes, quality =', quality)
                return im_buffer
        else:
            return im_buffer
def migrate(self): if not self.db.has_column("auth_user", "last_login"): self.db.add_column( "auth_user", "last_login", DateTimeField("Last Login", blank=True, null=True))
def iterate_orders(self, form_data: dict):
    """Yield export rows for all orders of ``self.events``.

    First yields the header row, then a ``ProgressSetTotal`` marker, then one
    list per order (same column order as the headers).

    :param form_data: export options; reads ``paid_only`` and
        ``include_payment_amounts``.
    """
    # Subquery: latest confirmed/refunded payment date per order.
    p_date = OrderPayment.objects.filter(
        order=OuterRef('pk'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED),
        payment_date__isnull=False).values('order').annotate(
            m=Max('payment_date')).values('m').order_by()
    # Subquery: comma-joined provider names of all non-failed payments.
    p_providers = OrderPayment.objects.filter(
        order=OuterRef('pk'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED,
                   OrderPayment.PAYMENT_STATE_PENDING,
                   OrderPayment.PAYMENT_STATE_CREATED),
    ).values('order').annotate(
        m=GroupConcat('provider', delimiter=',')).values('m').order_by()
    # Subquery: all invoice numbers of the order, joined with ', '.
    i_numbers = Invoice.objects.filter(
        order=OuterRef('pk'),
    ).values('order').annotate(m=GroupConcat(
        'full_invoice_no', delimiter=', ')).values('m').order_by()
    # Subquery: number of positions in the order.
    s = OrderPosition.objects.filter(
        order=OuterRef('pk')).order_by().values('order').annotate(
            k=Count('id')).values('k')
    qs = Order.objects.filter(event__in=self.events).annotate(
        payment_date=Subquery(p_date, output_field=DateTimeField()),
        payment_providers=Subquery(p_providers, output_field=CharField()),
        invoice_numbers=Subquery(i_numbers, output_field=CharField()),
        pcnt=Subquery(
            s, output_field=IntegerField())).select_related('invoice_address')
    qs = self._date_filter(qs, form_data, rel='')
    if form_data['paid_only']:
        qs = qs.filter(status=Order.STATUS_PAID)
    tax_rates = self._get_all_tax_rates(qs)

    headers = [
        _('Event slug'),
        _('Order code'),
        _('Order total'),
        _('Status'),
        _('Email'),
        _('Phone number'),
        _('Order date'),
        _('Order time'),
        _('Company'),
        _('Name'),
    ]
    # Per-event exports expand the name into its scheme parts; multi-event
    # exports keep a single 'Name' column.
    name_scheme = PERSON_NAME_SCHEMES[
        self.event.settings.
        name_scheme] if not self.is_multievent else None
    if name_scheme and len(name_scheme['fields']) > 1:
        for k, label, w in name_scheme['fields']:
            headers.append(label)
    headers += [
        _('Address'),
        _('ZIP code'),
        _('City'),
        _('Country'),
        pgettext('address', 'State'),
        _('Custom address field'),
        _('VAT ID'),
        _('Date of last payment'),
        _('Fees'),
        _('Order locale')
    ]
    # Three columns (gross/net/tax) for every distinct tax rate found.
    for tr in tax_rates:
        headers += [
            _('Gross at {rate} % tax').format(rate=tr),
            _('Net at {rate} % tax').format(rate=tr),
            _('Tax value at {rate} % tax').format(rate=tr),
        ]
    headers.append(_('Invoice numbers'))
    headers.append(_('Sales channel'))
    headers.append(_('Requires special attention'))
    headers.append(_('Comment'))
    headers.append(_('Follow-up date'))
    headers.append(_('Positions'))
    headers.append(_('E-mail address verified'))
    headers.append(_('Payment providers'))
    if form_data.get('include_payment_amounts'):
        payment_methods = self._get_all_payment_methods(qs)
        for id, vn in payment_methods:
            headers.append(_('Paid by {method}').format(method=vn))
    yield headers

    # Pre-aggregate fee/payment sums so the per-order loop does no extra
    # queries.
    # NOTE(review): the values() includes tax_rate but the dict key is only
    # order__id, so an order with fees at several tax rates keeps only the
    # last group's sum here -- verify this is intended.
    full_fee_sum_cache = {
        o['order__id']: o['grosssum']
        for o in OrderFee.objects.values('tax_rate', 'order__id').order_by(
        ).annotate(grosssum=Sum('value'))
    }
    fee_sum_cache = {
        (o['order__id'], o['tax_rate']): o
        for o in OrderFee.objects.values('tax_rate', 'order__id').order_by(
        ).annotate(taxsum=Sum('tax_value'), grosssum=Sum('value'))
    }
    if form_data.get('include_payment_amounts'):
        payment_sum_cache = {
            (o['order__id'], o['provider']): o['grosssum']
            for o in OrderPayment.objects.values(
                'provider', 'order__id').order_by().filter(state__in=[
                    OrderPayment.PAYMENT_STATE_CONFIRMED,
                    OrderPayment.PAYMENT_STATE_REFUNDED
                ]).annotate(grosssum=Sum('amount'))
        }
        refund_sum_cache = {
            (o['order__id'], o['provider']): o['grosssum']
            for o in OrderRefund.objects.values(
                'provider', 'order__id').order_by().filter(state__in=[
                    OrderRefund.REFUND_STATE_DONE,
                    OrderRefund.REFUND_STATE_TRANSIT
                ]).annotate(grosssum=Sum('amount'))
        }
    sum_cache = {
        (o['order__id'], o['tax_rate']): o
        for o in OrderPosition.objects.values('tax_rate', 'order__id').
        order_by().annotate(taxsum=Sum('tax_value'), grosssum=Sum('price'))
    }
    yield self.ProgressSetTotal(total=qs.count())

    for order in qs.order_by('datetime').iterator():
        # Timestamps are rendered in the event's configured timezone.
        tz = pytz.timezone(
            self.event_object_cache[order.event_id].settings.timezone)
        row = [
            self.event_object_cache[order.event_id].slug,
            order.code,
            order.total,
            order.get_status_display(),
            order.email,
            str(order.phone) if order.phone else '',
            order.datetime.astimezone(tz).strftime('%Y-%m-%d'),
            order.datetime.astimezone(tz).strftime('%H:%M:%S'),
        ]
        try:
            row += [
                order.invoice_address.company,
                order.invoice_address.name,
            ]
            if name_scheme and len(name_scheme['fields']) > 1:
                for k, label, w in name_scheme['fields']:
                    row.append(order.invoice_address.name_parts.get(k, ''))
            row += [
                order.invoice_address.street,
                order.invoice_address.zipcode,
                order.invoice_address.city,
                order.invoice_address.country
                if order.invoice_address.country else
                order.invoice_address.country_old,
                order.invoice_address.state,
                order.invoice_address.custom_field,
                order.invoice_address.vat_id,
            ]
        except InvoiceAddress.DoesNotExist:
            # No invoice address: pad with empty strings so columns stay
            # aligned (9 base columns + optional name-part columns).
            row += [''] * (9 + (len(name_scheme['fields'])
                                if name_scheme
                                and len(name_scheme['fields']) > 1 else 0))
        row += [
            order.payment_date.astimezone(tz).strftime('%Y-%m-%d')
            if order.payment_date else '',
            full_fee_sum_cache.get(order.id) or Decimal('0.00'),
            order.locale,
        ]
        for tr in tax_rates:
            taxrate_values = sum_cache.get((order.id, tr), {
                'grosssum': Decimal('0.00'),
                'taxsum': Decimal('0.00')
            })
            fee_taxrate_values = fee_sum_cache.get(
                (order.id, tr), {
                    'grosssum': Decimal('0.00'),
                    'taxsum': Decimal('0.00')
                })
            row += [
                taxrate_values['grosssum'] +
                fee_taxrate_values['grosssum'],
                # Net = gross - tax, positions and fees combined.
                (taxrate_values['grosssum'] - taxrate_values['taxsum'] +
                 fee_taxrate_values['grosssum'] -
                 fee_taxrate_values['taxsum']),
                taxrate_values['taxsum'] + fee_taxrate_values['taxsum'],
            ]
        row.append(order.invoice_numbers)
        row.append(order.sales_channel)
        row.append(_('Yes') if order.checkin_attention else _('No'))
        row.append(order.comment or "")
        row.append(
            order.custom_followup_at.strftime("%Y-%m-%d")
            if order.custom_followup_at else "")
        row.append(order.pcnt)
        row.append(_('Yes') if order.email_known_to_work else _('No'))
        # Human-readable provider names; 'free' is deliberately skipped.
        row.append(', '.join([
            str(self.providers.get(p, p))
            for p in sorted(set((order.payment_providers or '').split(',')))
            if p and p != 'free'
        ]))
        if form_data.get('include_payment_amounts'):
            payment_methods = self._get_all_payment_methods(qs)
            for id, vn in payment_methods:
                # Net amount per provider = payments - refunds.
                row.append(
                    payment_sum_cache.get((order.id, id), Decimal('0.00')) -
                    refund_sum_cache.get((order.id, id), Decimal('0.00')))
        yield row
class Case(Model):
    """A citizen case.

    * state: case state, defaults to draft (not yet accepted)
    * uuid: case identifier (uuid4)
    * type: case category
    * region: the submitter's electoral district
    * title: case title
    * content: case body text
    * location: related address for the case
    * username: submitter's name
    * mobile: mobile phone number
    * email: e-mail address
    * address: postal address
    * open_time: date the case was accepted/opened
    * close_time: date the case was closed
    * update_time: time of last update
    """
    state = FSMField(default=State.DRAFT,
                     verbose_name=_('Case State'),
                     choices=State.CHOICES)
    uuid = UUIDField(default=uuid.uuid4, verbose_name=_('UUID'), unique=True)
    # Zero-padded sequence number derived from pk in save(); '-' until then.
    number = CharField(max_length=6,
                       default='-',
                       null=True,
                       blank=True,
                       verbose_name=_('Case Number'))
    type = ForeignKey('cases.Type',
                      on_delete=CASCADE,
                      related_name='cases',
                      verbose_name=_('Case Type'))
    region = ForeignKey('cases.Region',
                        on_delete=CASCADE,
                        related_name='cases',
                        verbose_name=_('User Region'))
    title = CharField(max_length=255, verbose_name=_('Case Title'))
    content = TextField(verbose_name=_('Content'))
    location = CharField(null=True,
                         blank=True,
                         max_length=255,
                         verbose_name=_('Location'))
    username = CharField(max_length=50, verbose_name=_('Username'))
    mobile = CharField(max_length=10,
                       null=True,
                       blank=True,
                       verbose_name=_('Mobile'))
    email = EmailField(null=True, blank=True, verbose_name=_('Email'))
    address = CharField(null=True,
                        blank=True,
                        max_length=255,
                        verbose_name=_('Address'))
    open_time = DateTimeField(null=True,
                              blank=True,
                              verbose_name=_('Opened Time'))
    close_time = DateTimeField(null=True,
                               blank=True,
                               verbose_name=_('Closed Time'))
    create_time = DateTimeField(auto_now_add=True,
                                null=True,
                                blank=True,
                                verbose_name=_('Created Time'))
    update_time = DateTimeField(auto_now=True,
                                null=True,
                                blank=True,
                                verbose_name=_('Updated Time'))
    disapprove_info = TextField(null=True,
                                blank=True,
                                verbose_name=_('Disapprove Info'))
    note = TextField(null=True, blank=True, verbose_name=_('Case Notes'))
    tags = TagField(blank=True, verbose_name=_('Case Tags'))

    objects = CaseQuerySet.as_manager()

    class Meta:
        verbose_name = _('Case')
        verbose_name_plural = _('Cases')
        ordering = ('id', )

    def save(self, *args, **kwargs):
        """Persist the case; on first save also assign the case number,
        send the receipt notification and attach any temporary files.

        The nested ``self.save()`` is a deliberate second save: the number
        can only be derived once the pk exists. On that second call
        ``created`` is False, so there is no further recursion.
        """
        created = self.pk is None
        super(Case, self).save(*args, **kwargs)
        if created:
            self.number = str(self.pk).zfill(6)
            self.save()
            # Template name is a fixed key in the notification system
            # ('收件通知' = receipt notice); do not translate.
            self.confirm(template_name='收件通知')
            self.move_file()

    def __str__(self):
        return self.number

    def to_dict(self):
        """Snapshot of the editable fields, used to create a CaseHistory."""
        return {
            'state': self.state,
            'title': self.title,
            'type': self.type,
            'region': self.region,
            'content': self.content,
            'location': self.location,
            'username': self.username,
            'mobile': self.mobile,
            'email': self.email,
            'address': self.address,
        }

    def move_file(self):
        """Attach every TempFile uploaded under this case's uuid as a
        CaseFile.

        NOTE(review): the TempFile rows are not deleted here -- confirm
        cleanup happens elsewhere.
        """
        case = Case.objects.get(uuid=self.uuid)
        objs = TempFile.objects.filter(case_uuid=self.uuid)
        for i in objs:
            file = TEMP_STORAGE.open(i.file.name)
            case_file = CaseFile()
            case_file.case = case
            case_file.file = file
            case_file.save()
class RootKey(TimestampedModel): """A root key for signing macaroons.""" id = BigAutoField(primary_key=True, verbose_name="ID") material = BinaryField() expiration = DateTimeField()
class Job(Model):
    """A job represents a work which has to be done in the 'background' (ie:
    another process than the processes which respond to the clients).
    They are useful for periodic tasks (eg: polling data, like emails, from
    another server) or long tasks (eg: generating a lot of data).

    The type of the job (see creme_core.creme_jobs.base.JobType) determines if
    the job is periodic, pseudo-periodic or not periodic.

    Periodic & pseudo-periodic (see JobType for the difference between them)
    Jobs must be 'system' Job:
     - they are created in 'populate' scripts.
     - they have no user.
     - they can not be deleted, but they can be disabled (see 'enabled' field).
     - periodic Jobs must have their 'periodicity' field filled.
     - pseudo-periodic Jobs should not have their 'periodicity' field filled,
       because it is useless ; the value settings.PSEUDO_PERIOD is used as
       security period instead.

    Not periodic Jobs are user Jobs:
     - they are dynamically created by a view.
     - they must have their 'user' filled; it correspond to the User which
       have created the Job, & who owns it. The Job should act with the
       credentials of this User.
     - A view which creates a Job should check settings.MAX_JOBS_PER_USER
       before creating a Job, and redirect to the jobs list view if the Job
       can not be created (tip: you can use Job.not_finished_jobs()).
     - They have to be deleted once they are finished, in order to create
       other user Jobs.

    The 'reference_run' field is always filled (in an automatic way at least),
    but does not means anything for not periodic Jobs ; in this case it is
    only the creation date, which is not very useful. The 'reference_run' is
    used to compute the time of each execution, which must be something like:
        reference_run + N * periodicity
    """
    STATUS_WAIT = 1
    STATUS_ERROR = 10
    STATUS_OK = 20

    type_id = CharField(_(u'Type of job'), max_length=48, editable=False)
    user = CremeUserForeignKey(verbose_name=_(u'User'),
                               null=True,
                               editable=False)
    enabled = BooleanField(_(u'Enabled'), default=True, editable=False)
    language = CharField(_(u'Language'), max_length=10, editable=False)
    # created = CreationDateTimeField(_('Creation date'))
    reference_run = DateTimeField(_(u'Reference run'))
    periodicity = DatePeriodField(_(u'Periodicity'), null=True)
    last_run = DateTimeField(_(u'Last run'), null=True, editable=False)
    ack_errors = PositiveIntegerField(
        default=0,
        editable=False)  # Number of errors of communication with the queue.
    status = PositiveSmallIntegerField(
        _(u'Status'),
        editable=False,
        default=STATUS_WAIT,
        choices=(
            (STATUS_WAIT, _(u'Waiting')),
            (STATUS_ERROR, _(u'Error')),
            (STATUS_OK, _(u'Completed successfully')),
        ),
    )
    error = TextField(_(u'Error'), null=True, editable=False)
    raw_data = TextField(
        editable=False
    )  # It stores the Job's parameters  # TODO: use a JSONField ?

    class Meta:
        app_label = 'creme_core'
        verbose_name = _(u'Job')
        verbose_name_plural = _(u'Jobs')
        # ordering = ('created',)
        ordering = ('id', )

    def __init__(self, *args, **kwargs):
        # super(Job, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)
        if not self.language:
            self.language = get_language()
        # Remember the current values so refresh() can detect changes later.
        self.__init_refreshing_cache()

    def __init_refreshing_cache(self):
        # Cache the fields whose change must trigger a queue REFRESH message.
        self._old_periodicity = self.periodicity
        self._old_reference_run = self.reference_run
        self._old_enabled = self.enabled

    def __str__(self):
        return str(self.type)

    def __repr__(self):
        return u'<Job type="{type}" id="{id}">'.format(type=self.type_id,
                                                       id=self.id)

    def get_absolute_url(self):
        return reverse('creme_core__job', args=(self.id, ))

    def get_delete_absolute_url(self):
        return reverse('creme_core__delete_job', args=(self.id, ))

    def get_edit_absolute_url(self):
        return reverse('creme_core__edit_job', args=(self.id, ))

    @property
    def data(self):
        # Job parameters, deserialized from raw_data.
        return jsonloads(self.raw_data)  # TODO: cache

    @data.setter
    def data(self, value):
        self.raw_data = jsondumps(value)

    @property
    def description(self):  # TODO: cache ?
        # Delegates to the JobType; a broken type must not crash callers,
        # so any error is logged and an empty description is returned.
        try:
            return self.type.get_description(self)
        except Exception:
            logger.exception(
                'Error when building the description of the job id="%s"',
                self.id)
        return ()

    def check_owner(self, user):
        return user.is_superuser or self.user == user

    def check_owner_or_die(self, user):
        if not self.check_owner(user):
            raise PermissionDenied('You are not the owner of this job')

    @property
    def is_finished(self):
        return self.status != self.STATUS_WAIT

    @classmethod
    def not_finished_jobs(cls, user):
        return cls.objects.filter(user=user, status=cls.STATUS_WAIT)

    @property
    def progress(self):
        jtype = self.type
        if jtype is not None:
            return jtype.progress(self)

    @property
    def real_periodicity(self):
        # Pseudo-periodic system jobs fall back to the configured
        # security period when no explicit periodicity is set.
        periodicity = self.periodicity
        if periodicity is None and self.user_id is None:
            periodicity = HoursPeriod(value=settings.PSEUDO_PERIOD)
        return periodicity

    def _update_ack_errors(self, incr):
        # F() expression keeps the increment atomic at the DB level.
        Job.objects.filter(id=self.id).update(ack_errors=F('ack_errors') +
                                              incr)

    def forget_ack_errors(self):
        self._update_ack_errors(-self.ack_errors)

    def get_config_form_class(self):
        "@see JobType.get_config_form_class()"
        jtype = self.type
        return jtype.get_config_form_class(self) if jtype is not None else None

    def refresh(self, force=False):
        """Ask to the JobManager to refresh the job if it's needed, because
        the next runs should be earlier, or disabled.
        @param force: Boolean ; <True> means the message is sent even if no
               field has changed.
        @return: Truthy when the queue reported an error.
        """
        from ..core.job import JobManagerQueue

        queue_error = False
        enabled = self.enabled
        reference_run = self.reference_run
        periodicity = self.periodicity

        if self._old_enabled != enabled or \
           self._old_reference_run != reference_run or \
           self._old_periodicity != periodicity or \
           force:
            # NB: we sent all the fields values in order to get a more robust system
            # (even if a REFRESH-message is lost, the next one is complete).
            data = {
                'enabled': enabled,
                'reference_run': dt_to_ISO8601(reference_run),
            }
            if periodicity:
                data['periodicity'] = periodicity.as_dict()

            queue_error = JobManagerQueue.get_main_queue().refresh_job(
                self, data)
            # Re-snapshot so the next refresh() compares against these values.
            self.__init_refreshing_cache()

        return queue_error

    def update(self, refresh_data, date_period_registry=date_period_registry):
        """Update the fields with information generated by refresh().
        Notice that the instance is not saved.
        @param refresh_data: Dictionary. See data sent on queue by refresh().
        @param date_period_registry: Instance of
               creme_core.utils.date_period.DatePeriodRegistry.
        @return: True if the instance has changed.
        """
        changed = False
        get = refresh_data.get

        enabled = get('enabled')
        if enabled is not None:
            if self.enabled != enabled:
                self.enabled = enabled
                changed = True

        ref_run_str = get('reference_run')
        if ref_run_str is not None:
            ref_run = dt_from_ISO8601(ref_run_str)
            if self.reference_run != ref_run:
                self.reference_run = ref_run
                changed = True

        periodicity_dict = get('periodicity')
        if periodicity_dict is not None:
            periodicity = date_period_registry.deserialize(periodicity_dict)
            if self.periodicity != periodicity:
                self.periodicity = periodicity
                changed = True

        return changed

    @atomic
    def save(self, *args, **kwargs):
        """Persist the job and notify the job queue on creation/refresh.

        A queue communication failure is recorded in ack_errors instead of
        raising, so the save itself always succeeds.
        """
        from ..core.job import JobManagerQueue

        created = self.pk is None

        if created and self.reference_run is None:
            self.reference_run = now()

            if self.user_id is None:  # System job
                # System jobs are aligned on the hour.
                self.reference_run = round_hour(self.reference_run)

        # super(Job, self).save(*args, **kwargs)
        super().save(*args, **kwargs)

        queue_error = False
        if created:
            if self.user_id is not None:
                queue_error = JobManagerQueue.get_main_queue().start_job(self)
        elif self.user_id is None:  # System job
            queue_error = self.refresh()

        if queue_error:
            self._update_ack_errors(1)

    @property
    def stats(self):
        jtype = self.type
        return jtype.get_stats(self) if jtype is not None else []

    @property
    def type(self):
        from ..core.job import job_type_registry
        return job_type_registry.get(self.type_id)

    @type.setter
    def type(self, value):
        # TODO: check that it is in job_type_registry ?
        self.type_id = value.id
class Event(Schedulable):
    '''
    An Event is a schedulable item with a conference model item as its
    payload.
    '''
    objects = InheritanceManager()
    eventitem = ForeignKey(EventItem,
                           on_delete=CASCADE,
                           related_name="scheduler_events")
    starttime = DateTimeField(blank=True)
    max_volunteer = PositiveIntegerField(default=0)
    approval_needed = BooleanField(default=False)
    max_commitments = PositiveIntegerField(default=0)

    def has_commitment_space(self, commitment_class_name):
        # True while fewer commitments of this class exist than the cap.
        from scheduler.models import Ordering
        return (Ordering.objects.filter(
            allocation__event=self,
            class_name=commitment_class_name).count() < self.max_commitments)

    @property
    def foreign_event_id(self):
        return self.eventitem.eventitem_id

    # New - fits scheduling API refactor
    def set_locations(self, locations):
        '''
        Takes a LIST of locations, removes all existing location settings
        and replaces them with the given list.  Locations are expected to
        be location items
        '''
        from scheduler.models import ResourceAllocation
        # Drop every current location allocation before re-adding.
        for assignment in self.resources_allocated.all():
            if assignment.resource.as_subtype.__class__.__name__ == "Location":
                assignment.delete()
        for location in locations:
            if location is not None:
                # NOTE(review): bare except -- presumably meant to catch
                # Location.DoesNotExist; anything else is silently treated
                # as "create new". Worth narrowing.
                try:
                    loc = Location.objects.select_subclasses().get(
                        _item=location)
                except:
                    loc = Location(_item=location)
                    loc.save()
                ra = ResourceAllocation(resource=loc, event=self)
                ra.save()

    # New - from refactoring
    @property
    def people(self):
        # Wrap every Worker booking of this event in a Person DTO.
        people = []
        for booking in self.resources_allocated.all():
            if booking.resource.as_subtype.__class__.__name__ == "Worker":
                person = Person(booking=booking)
                if hasattr(booking, 'label'):
                    person.label = booking.label.text
                people += [person]
        return people

    # New - from refactoring
    def allocate_person(self, person):
        '''
        allocated worker for the new model - right now, focused on create
        uses the Person from the data_transfer objects.

        Returns a BookingResponse carrying any SCHEDULE_CONFLICT /
        OCCURRENCE_OVERBOOKED warnings plus the booking id.
        '''
        from scheduler.idd import get_schedule
        from scheduler.models import (
            Ordering,
            ResourceAllocation,
        )
        warnings = []
        time_format = GBE_DATETIME_FORMAT  # NOTE(review): unused local
        worker = None
        if person.public_id:
            item = WorkerItem.objects.get(pk=person.public_id)
            worker = Worker(_item=item, role=person.role)
        else:
            worker = Worker(_item=person.user.profile, role=person.role)
        # TODO is there a leak here? what happens to old workers
        # that aren't linked??
        worker.save()
        if person.users:
            users = person.users
        else:
            users = [worker.workeritem.user_object]
        # Warn about any overlapping bookings for each affected user,
        # skipping the booking being edited (person.booking_id).
        for user in users:
            for conflict in get_schedule(
                    user=user,
                    start_time=self.start_time,
                    end_time=self.end_time).schedule_items:
                if not person.booking_id or (person.booking_id !=
                                             conflict.booking_id):
                    warnings += [
                        Warning(code="SCHEDULE_CONFLICT",
                                user=user,
                                occurrence=conflict.event)
                    ]
        # Re-point an existing allocation, or create a fresh one.
        if person.booking_id:
            allocation = ResourceAllocation.objects.get(id=person.booking_id)
            allocation.resource = worker
            allocation.event = self
        else:
            allocation = ResourceAllocation(event=self, resource=worker)
        allocation.save()
        if person.commitment:
            ordering, created = Ordering.objects.get_or_create(
                allocation=allocation)
            if person.commitment.role is not None:
                ordering.role = person.commitment.role
            if person.commitment.order:
                ordering.order = person.commitment.order
            ordering.class_name = person.commitment.class_name
            ordering.class_id = person.commitment.class_id
            ordering.save()
        if self.extra_volunteers() > 0:
            warnings += [
                Warning(code="OCCURRENCE_OVERBOOKED",
                        details="Over booked by %s volunteers" %
                        (self.extra_volunteers()))
            ]
        if person.label:
            # refactor
            from scheduler.models import Label
            l, created = Label.objects.get_or_create(allocation=allocation)
            l.text = person.label
            l.save()
        return BookingResponse(warnings=warnings,
                               booking_id=allocation.pk,
                               occurrence=self)

    def role_count(self, role="Volunteer"):
        allocations = self.resources_allocated.all()
        participants = allocations.filter(resource__worker__role=role).count()
        return participants

    @property
    def event_type_name(self):
        '''
        Get event type name. Uses a database call
        '''
        return self.event_type.__name__

    @property
    def event_type(self):
        '''
        Get event's underlying type (ie, conference model)
        '''
        return type(self.as_subtype)

    @property
    def as_subtype(self):
        '''
        Get the representation of this Event as its underlying conference type
        '''
        return EventItem.objects.get_subclass(eventitem_id=self.eventitem_id)

    @property
    def duration(self):
        return self.eventitem.child().sched_duration

    def __str__(self):
        return self.eventitem.describe

    @property
    def location(self):
        # First allocated location, if any.
        l = Location.objects.filter(allocations__event=self)
        if len(l) > 0:
            return l[0]._item
        else:
            return None  # or what??

    def extra_volunteers(self):
        '''
        The difference between the max suggested # of volunteers and the
        actual number
          > 0 if there are too many volunteers for the max. The number will
            be the # of people over booked (if there are 3 spaces, and 4
            volunteers, the value returned is 1)
          = 0 if it is at capacity
          < 0 if it is fewer than the max, the abosolute value is the
            amount of space remaining (if there are 4 spaces, and 3
            volunteers, the value will be -1)
        '''
        count = Worker.objects.filter(allocations__event=self,
                                      role='Volunteer').count()
        return count - self.max_volunteer

    # New with Scheduler API
    @property
    def labels(self):
        return self.eventlabel_set.values_list('text', flat=True)
class SessionFormSubmission(AbstractFormSubmission):
    """A (possibly multi-step) form submission tied to a session key or a
    user (unique per page for each, see Meta.unique_together)."""

    session_key = CharField(max_length=40, null=True, default=None)
    user = ForeignKey(settings.AUTH_USER_MODEL,
                      null=True,
                      blank=True,
                      related_name='+',
                      on_delete=PROTECT)
    # JSON mapping: original file path -> {'WxH' variant: thumbnail path}.
    thumbnails_by_path = TextField(default=json.dumps({}))
    last_modification = DateTimeField(_('last modification'), auto_now=True)

    INCOMPLETE = 'incomplete'
    COMPLETE = 'complete'
    REVIEWED = 'reviewed'
    APPROVED = 'approved'
    REJECTED = 'rejected'
    STATUSES = (
        (INCOMPLETE, _('Not submitted')),
        (COMPLETE, _('In progress')),
        (REVIEWED, _('Under consideration')),
        (APPROVED, _('Approved')),
        (REJECTED, _('Rejected')),
    )
    status = CharField(max_length=10, choices=STATUSES, default=INCOMPLETE)

    class Meta:
        verbose_name = _('form submission')
        verbose_name_plural = _('form submissions')
        unique_together = (('page', 'session_key'), ('page', 'user'))

    @property
    def is_complete(self):
        return self.status != self.INCOMPLETE

    @property
    def form_page(self):
        return self.page.specific

    def get_session(self):
        # Rebuild the session store this submission belongs to.
        return import_module(
            settings.SESSION_ENGINE).SessionStore(session_key=self.session_key)

    def reset_step(self):
        """Forget the user's current step in the session, if stored."""
        session = self.get_session()
        try:
            del session[self.form_page.current_step_session_key]
        except KeyError:
            pass
        else:
            session.save()

    def get_storage(self):
        return self.form_page.get_storage()

    def get_thumbnail_path(self, path, width=64, height=64):
        """Return the storage path of a ``width``×``height`` thumbnail for
        ``path``, generating and caching it on first request.

        The cache lives in ``thumbnails_by_path`` (saved on the model).
        Returns '' for a falsy path.
        """
        if not path:
            return ''
        variant = '%s×%s' % (width, height)
        thumbnails_by_path = json.loads(self.thumbnails_by_path)
        thumbnails_paths = thumbnails_by_path.get(path)
        if thumbnails_paths is None:
            thumbnails_by_path[path] = {}
        else:
            thumbnail_path = thumbnails_paths.get(variant)
            if thumbnail_path is not None:
                # Cache hit: reuse the previously generated thumbnail.
                return thumbnail_path
        path = Path(path)
        # e.g. 'photo.jpg' -> 'photo.64×64.jpg'
        thumbnail_path = str(path.with_suffix('.%s%s' %
                                              (variant, path.suffix)))
        storage = self.get_storage()
        thumbnail_path = storage.get_available_name(thumbnail_path)
        thumbnail = Image.open(storage.path(path))
        thumbnail.thumbnail((width, height))
        thumbnail.save(storage.path(thumbnail_path))
        thumbnails_by_path[str(path)][variant] = thumbnail_path
        self.thumbnails_by_path = json.dumps(thumbnails_by_path,
                                             cls=StreamFormJSONEncoder)
        self.save()
        return thumbnail_path

    def get_fields(self, by_step=False):
        return self.form_page.get_form_fields(by_step=by_step)

    def get_existing_thumbnails(self, path):
        # Yield every cached thumbnail path for the given original path.
        thumbnails_paths = json.loads(self.thumbnails_by_path).get(path, {})
        for thumbnail_path in thumbnails_paths.values():
            yield thumbnail_path

    def get_files_by_field(self):
        """Map each FileField name to [original path, *thumbnail paths]."""
        data = self.get_data(raw=True)
        files = {}
        for name, field in self.get_fields().items():
            if isinstance(field, FileField):
                path = data.get(name)
                if path:
                    files[name] = [path] + list(
                        self.get_existing_thumbnails(path))
        return files

    def get_all_files(self):
        for paths in self.get_files_by_field().values():
            for path in paths:
                yield path

    def delete_file(self, field_name):
        """Delete the stored file(s) of ``field_name`` plus their cached
        thumbnails, then persist the pruned cache."""
        thumbnails_by_path = json.loads(self.thumbnails_by_path)
        for path in self.get_files_by_field().get(field_name, ()):
            self.get_storage().delete(path)
            if path in thumbnails_by_path:
                del thumbnails_by_path[path]
        self.thumbnails_by_path = json.dumps(thumbnails_by_path,
                                             cls=StreamFormJSONEncoder)
        self.save()

    def render_email(self, value):
        return (mark_safe('<a href="mailto:%s" target="_blank">%s</a>') %
                (value, value))

    def render_link(self, value):
        return (mark_safe('<a href="%s" target="_blank">%s</a>') %
                (value, value))

    def render_image(self, value):
        storage = self.get_storage()
        return (
            mark_safe('<a href="%s" target="_blank"><img src="%s" /></a>') %
            (storage.url(value), storage.url(self.get_thumbnail_path(value))))

    def render_file(self, value):
        return mark_safe('<a href="%s" target="_blank">%s</a>') % (
            self.get_storage().url(value), Path(value).name)

    def format_value(self, field, value):
        """Render a raw submission value for display.

        Order matters: page-level formatting wins, then booleans,
        sequences, dates, and field-type-specific HTML renderers;
        already-safe HTML passes through untouched.
        """
        if value is None or value == '':
            return '-'
        new_value = self.form_page.format_value(field, value)
        if new_value != value:
            return new_value
        if value is True:
            return 'Yes'
        if value is False:
            return 'No'
        if isinstance(value, (list, tuple)):
            return ', '.join(
                [self.format_value(field, item) for item in value])
        if isinstance(value, datetime.date):
            return naturaltime(value)
        if isinstance(field, EmailField):
            return self.render_email(value)
        if isinstance(field, URLField):
            return self.render_link(value)
        if isinstance(field, ImageField):
            return self.render_image(value)
        if isinstance(field, FileField):
            return self.render_file(value)
        if isinstance(value, SafeData) or hasattr(value, '__html__'):
            return value
        return str(value)

    def format_db_field(self, field_name, raw=False):
        # Prefer Django's get_FOO_display() for choice fields.
        method = getattr(self, 'get_%s_display' % field_name, None)
        if method is not None:
            return method()
        value = getattr(self, field_name)
        if raw:
            return value
        return self.format_value(
            self._meta.get_field(field_name).formfield(), value)

    def get_steps_data(self, raw=False):
        """Per-step data dicts; formatted for display unless ``raw``.

        zip_longest pads with {} so partially-filled submissions still
        produce one dict per step.
        """
        steps_data = json.loads(self.form_data)
        if raw:
            return steps_data
        fields_and_data_iterator = zip_longest(self.get_fields(by_step=True),
                                               steps_data,
                                               fillvalue={})
        return [
            OrderedDict([(name, self.format_value(field, step_data.get(name)))
                         for name, field in step_fields.items()])
            for step_fields, step_data in fields_and_data_iterator
        ]

    def get_extra_data(self, raw=False):
        return self.form_page.get_extra_data(self, raw=raw)

    def get_data(self, raw=False, add_metadata=True):
        """Flatten all steps (plus page extra data) into a single dict,
        optionally appending submission metadata columns."""
        steps_data = self.get_steps_data(raw=raw)
        form_data = {}
        form_data.update(self.get_extra_data(raw=raw))
        for step_data in steps_data:
            form_data.update(step_data)
        if add_metadata:
            form_data.update(
                status=self.format_db_field('status', raw=raw),
                user=self.format_db_field('user', raw=raw),
                submit_time=self.format_db_field('submit_time', raw=raw),
                last_modification=self.format_db_field('last_modification',
                                                       raw=raw),
            )
        return form_data

    def steps_with_data_iterator(self, raw=False):
        # Yields (step, [(field_name, field_label, value), ...]) per step.
        for step, step_data_fields, step_data in zip(
                self.form_page.get_steps(),
                self.form_page.get_data_fields(by_step=True),
                self.get_steps_data(raw=raw)):
            yield step, [(field_name, field_label, step_data[field_name])
                         for field_name, field_label in step_data_fields]