class Reservation(ModifiableModel): CREATED = 'created' CANCELLED = 'cancelled' CONFIRMED = 'confirmed' MODIFIED = 'modified' DENIED = 'denied' REQUESTED = 'requested' STATE_CHOICES = ( (CREATED, _('created')), (CANCELLED, _('cancelled')), (CONFIRMED, _('confirmed')), (MODIFIED, _('modified')), (DENIED, _('denied')), (REQUESTED, _('requested')), ) resource = models.ForeignKey('Resource', verbose_name=_('Resource'), db_index=True, related_name='reservations', on_delete=models.PROTECT) begin = models.DateTimeField(verbose_name=_('Begin time')) end = models.DateTimeField(verbose_name=_('End time')) duration = pgfields.DateTimeRangeField( verbose_name=_('Length of reservation'), null=True, blank=True, db_index=True) comments = models.TextField(null=True, blank=True, verbose_name=_('Comments')) user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('User'), null=True, blank=True, db_index=True, on_delete=models.PROTECT) state = models.CharField(max_length=16, choices=STATE_CHOICES, verbose_name=_('State'), default=CREATED) approver = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('Approver'), related_name='approved_reservations', null=True, blank=True, on_delete=models.SET_NULL) staff_event = models.BooleanField(verbose_name=_('Is staff event'), default=False) # access-related fields access_code = models.CharField(verbose_name=_('Access code'), max_length=32, null=True, blank=True) # EXTRA FIELDS START HERE event_subject = models.CharField(max_length=200, verbose_name=_('Event subject'), blank=True) event_description = models.TextField(verbose_name=_('Event description'), blank=True) number_of_participants = models.PositiveSmallIntegerField( verbose_name=_('Number of participants'), blank=True, null=True) participants = models.TextField(verbose_name=_('Participants'), blank=True) host_name = models.CharField(verbose_name=_('Host name'), max_length=100, blank=True) reservation_extra_questions = models.TextField( verbose_name=_('Reservation extra questions'), blank=True) # extra detail fields for manually confirmed reservations reserver_name = models.CharField(verbose_name=_('Reserver name'), max_length=100, blank=True) reserver_id = models.CharField( verbose_name=_('Reserver ID (business or person)'), max_length=30, blank=True) reserver_email_address = models.EmailField( verbose_name=_('Reserver email address'), blank=True) reserver_phone_number = models.CharField( verbose_name=_('Reserver phone number'), max_length=30, blank=True) reserver_address_street = models.CharField( verbose_name=_('Reserver address street'), max_length=100, blank=True) reserver_address_zip = models.CharField( verbose_name=_('Reserver address zip'), max_length=30, blank=True) reserver_address_city = models.CharField( verbose_name=_('Reserver address city'), max_length=100, blank=True) company = models.CharField(verbose_name=_('Company'), max_length=100, blank=True) billing_address_street = models.CharField( verbose_name=_('Billing address street'), max_length=100, blank=True) billing_address_zip = models.CharField( verbose_name=_('Billing address zip'), max_length=30, blank=True) billing_address_city = models.CharField( verbose_name=_('Billing address city'), max_length=100, blank=True) # If the reservation was imported from another system, you can store the original ID in the field below. 
origin_id = models.CharField(verbose_name=_('Original ID'), max_length=50, editable=False, null=True) objects = ReservationQuerySet.as_manager() class Meta: verbose_name = _("reservation") verbose_name_plural = _("reservations") ordering = ('id', ) def _save_dt(self, attr, dt): """ Any DateTime object is converted to UTC time zone aware DateTime before save If there is no time zone on the object, resource's time zone will be assumed through its unit's time zone """ save_dt(self, attr, dt, self.resource.unit.time_zone) def _get_dt(self, attr, tz): return get_dt(self, attr, tz) @property def begin_tz(self): return self.begin @begin_tz.setter def begin_tz(self, dt): self._save_dt('begin', dt) def get_begin_tz(self, tz): return self._get_dt("begin", tz) @property def end_tz(self): return self.end @end_tz.setter def end_tz(self, dt): """ Any DateTime object is converted to UTC time zone aware DateTime before save If there is no time zone on the object, resource's time zone will be assumed through its unit's time zone """ self._save_dt('end', dt) def get_end_tz(self, tz): return self._get_dt("end", tz) def is_active(self): return self.end >= timezone.now() and self.state not in ( Reservation.CANCELLED, Reservation.DENIED) def is_own(self, user): if not (user and user.is_authenticated): return False return user == self.user def need_manual_confirmation(self): return self.resource.need_manual_confirmation def are_extra_fields_visible(self, user): # the following logic is used also implemented in ReservationQuerySet # so if this is changed that probably needs to be changed as well if self.is_own(user): return True return self.resource.can_view_reservation_extra_fields(user) def can_view_access_code(self, user): if self.is_own(user): return True return self.resource.can_view_access_codes(user) def set_state(self, new_state, user): # Make sure it is a known state assert new_state in (Reservation.REQUESTED, Reservation.CONFIRMED, Reservation.DENIED, Reservation.CANCELLED) old_state = self.state if new_state == old_state: if old_state == Reservation.CONFIRMED: reservation_modified.send(sender=self.__class__, instance=self, user=user) return if new_state == Reservation.CONFIRMED: self.approver = user reservation_confirmed.send(sender=self.__class__, instance=self, user=user) elif old_state == Reservation.CONFIRMED: self.approver = None user_is_staff = self.user is not None and self.user.is_staff # Notifications if new_state == Reservation.REQUESTED: self.send_reservation_requested_mail() self.send_reservation_requested_mail_to_officials() elif new_state == Reservation.CONFIRMED: if self.need_manual_confirmation(): self.send_reservation_confirmed_mail() elif self.access_code: self.send_reservation_created_with_access_code_mail() else: if not user_is_staff: # notifications are not sent from staff created reservations to avoid spam self.send_reservation_created_mail() elif new_state == Reservation.MODIFIED: self.send_reservation_modified_mail() elif new_state == Reservation.DENIED: self.send_reservation_denied_mail() elif new_state == Reservation.CANCELLED: if user != self.user: self.send_reservation_cancelled_mail() reservation_cancelled.send(sender=self.__class__, instance=self, user=user) self.state = new_state self.save() def can_modify(self, user): if not user: return False # reservations that need manual confirmation and are confirmed cannot be # modified or cancelled without reservation approve permission cannot_approve = not self.resource.can_approve_reservations(user) if 
self.need_manual_confirmation( ) and self.state == Reservation.CONFIRMED and cannot_approve: return False return self.user == user or self.resource.can_modify_reservations(user) def can_add_comment(self, user): if self.is_own(user): return True return self.resource.can_access_reservation_comments(user) def can_view_field(self, user, field): if field not in RESERVATION_EXTRA_FIELDS: return True if self.is_own(user): return True return self.resource.can_view_reservation_extra_fields(user) def can_view_catering_orders(self, user): if self.is_own(user): return True return self.resource.can_view_catering_orders(user) def format_time(self): tz = self.resource.unit.get_tz() begin = self.begin.astimezone(tz) end = self.end.astimezone(tz) return format_dt_range(translation.get_language(), begin, end) def __str__(self): if self.state != Reservation.CONFIRMED: state_str = ' (%s)' % self.state else: state_str = '' return "%s: %s%s" % (self.format_time(), self.resource, state_str) def clean(self, **kwargs): """ Check restrictions that are common to all reservations. If this reservation isn't yet saved and it will modify an existing reservation, the original reservation need to be provided in kwargs as 'original_reservation', so that it can be excluded when checking if the resource is available. """ if self.end <= self.begin: raise ValidationError( _("You must end the reservation after it has begun")) # Check that begin and end times are on valid time slots. opening_hours = self.resource.get_opening_hours( self.begin.date(), self.end.date()) for dt in (self.begin, self.end): days = opening_hours.get(dt.date(), []) day = next((day for day in days if day['opens'] is not None and day['opens'] <= dt <= day['closes']), None) if day and not is_valid_time_slot(dt, self.resource.slot_size, day['opens']): raise ValidationError( _("Begin and end time must match time slots"), code='invalid_time_slot') original_reservation = self if self.pk else kwargs.get( 'original_reservation', None) if self.resource.check_reservation_collision(self.begin, self.end, original_reservation): raise ValidationError( _("The resource is already reserved for some of the period")) if (self.end - self.begin) < self.resource.min_period: raise ValidationError( _("The minimum reservation length is %(min_period)s") % {'min_period': humanize_duration(self.resource.min_period)}) if self.access_code: validate_access_code(self.access_code, self.resource.access_code_type) def get_notification_context(self, language_code, user=None, notification_type=None): if not user: user = self.user with translation.override(language_code): reserver_name = self.reserver_name reserver_email_address = self.reserver_email_address if not reserver_name and self.user and self.user.get_display_name( ): reserver_name = self.user.get_display_name() if not reserver_email_address and user and user.email: reserver_email_address = user.email context = { 'resource': self.resource.name, 'begin': localize_datetime(self.begin), 'end': localize_datetime(self.end), 'begin_dt': self.begin, 'end_dt': self.end, 'time_range': self.format_time(), 'number_of_participants': self.number_of_participants, 'host_name': self.host_name, 'reserver_name': reserver_name, 'event_subject': self.event_subject, 'event_description': self.event_description, 'reserver_email_address': reserver_email_address, 'reserver_phone_number': self.reserver_phone_number, } if self.resource.unit: context['unit'] = self.resource.unit.name context['unit_id'] = self.resource.unit.id if self.can_view_access_code(user) 
and self.access_code: context['access_code'] = self.access_code if notification_type == NotificationType.RESERVATION_CONFIRMED: if self.resource.reservation_confirmed_notification_extra: context[ 'extra_content'] = self.resource.reservation_confirmed_notification_extra elif notification_type == NotificationType.RESERVATION_REQUESTED: if self.resource.reservation_requested_notification_extra: context[ 'extra_content'] = self.resource.reservation_requested_notification_extra # Get last main and ground plan images. Normally there shouldn't be more than one of each # of those images. images = self.resource.images.filter( type__in=('main', 'ground_plan')).order_by('-sort_order') main_image = next((i for i in images if i.type == 'main'), None) ground_plan_image = next( (i for i in images if i.type == 'ground_plan'), None) if main_image: main_image_url = main_image.get_full_url() if main_image_url: context['resource_main_image_url'] = main_image_url if ground_plan_image: ground_plan_image_url = ground_plan_image.get_full_url() if ground_plan_image_url: context[ 'resource_ground_plan_image_url'] = ground_plan_image_url return context def send_reservation_mail(self, notification_type, user=None, attachments=None): """ Stuff common to all reservation related mails. If user isn't given use self.user. """ try: notification_template = NotificationTemplate.objects.get( type=notification_type) except NotificationTemplate.DoesNotExist: return if user: email_address = user.email else: if not (self.reserver_email_address or self.user): return email_address = self.reserver_email_address or self.user.email user = self.user language = user.get_preferred_language() if user else DEFAULT_LANG context = self.get_notification_context( language, notification_type=notification_type) try: rendered_notification = notification_template.render( context, language) except NotificationTemplateException as e: logger.error(e, exc_info=True, extra={'user': user.uuid}) return send_respa_mail(email_address, rendered_notification['subject'], rendered_notification['body'], rendered_notification['html_body'], attachments) def send_reservation_requested_mail(self): self.send_reservation_mail(NotificationType.RESERVATION_REQUESTED) def send_reservation_requested_mail_to_officials(self): notify_users = self.resource.get_users_with_perm( 'can_approve_reservation') if len(notify_users) > 100: raise Exception("Refusing to notify more than 100 users (%s)" % self) for user in notify_users: self.send_reservation_mail( NotificationType.RESERVATION_REQUESTED_OFFICIAL, user=user) def send_reservation_modified_mail(self): self.send_reservation_mail(NotificationType.RESERVATION_MODIFIED) def send_reservation_denied_mail(self): self.send_reservation_mail(NotificationType.RESERVATION_DENIED) def send_reservation_confirmed_mail(self): reservations = [self] ical_file = build_reservations_ical_file(reservations) attachment = ('reservation.ics', ical_file, 'text/calendar') self.send_reservation_mail(NotificationType.RESERVATION_CONFIRMED, attachments=[attachment]) def send_reservation_cancelled_mail(self): self.send_reservation_mail(NotificationType.RESERVATION_CANCELLED) def send_reservation_created_mail(self): reservations = [self] ical_file = build_reservations_ical_file(reservations) attachment = 'reservation.ics', ical_file, 'text/calendar' self.send_reservation_mail(NotificationType.RESERVATION_CREATED, attachments=[attachment]) def send_reservation_created_with_access_code_mail(self): reservations = [self] ical_file = 
build_reservations_ical_file(reservations) attachment = 'reservation.ics', ical_file, 'text/calendar' self.send_reservation_mail( NotificationType.RESERVATION_CREATED_WITH_ACCESS_CODE, attachments=[attachment]) def send_access_code_created_mail(self): self.send_reservation_mail( NotificationType.RESERVATION_ACCESS_CODE_CREATED) def save(self, *args, **kwargs): self.duration = DateTimeTZRange(self.begin, self.end, '[)') if not self.access_code: access_code_type = self.resource.access_code_type if self.resource.is_access_code_enabled( ) and self.resource.generate_access_codes: self.access_code = generate_access_code(access_code_type) return super().save(*args, **kwargs)
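# --- Usage sketch (not part of the model) ----------------------------------
# A minimal, hedged example of driving the Reservation state machine defined
# above. `resource`, `requesting_user` and `approver` are assumed existing
# Resource/user instances; the names are illustrative only.
from datetime import timedelta
from django.utils import timezone

begin = timezone.now() + timedelta(days=1)
reservation = Reservation(
    resource=resource,
    user=requesting_user,
    begin=begin,
    end=begin + timedelta(hours=1),
    state=Reservation.REQUESTED,
)
reservation.clean()   # slot, collision and minimum-period checks; may raise ValidationError
reservation.save()    # also fills `duration` and generates an access code if the resource is configured to
reservation.set_state(Reservation.CONFIRMED, approver)  # sets the approver and sends the matching notification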
class CastMember(models.Model):
    movie = models.ForeignKey(Movie, related_name='cast_members', on_delete=models.CASCADE)
    person = models.ForeignKey(Person, on_delete=models.CASCADE)
class PaymentBill(models.Model):
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    value = models.FloatField()
class DummyGenericForeignKeyModel(models.Model):
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')
class SchoolEnrollment(models.Model):
    start_date = models.DateField(auto_now_add=True)
    school = models.ForeignKey(School, on_delete=models.CASCADE)
    student = models.ForeignKey(Person, on_delete=models.CASCADE)
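# Illustrative ORM queries over the small relation models above; `movie`,
# `some_user` and `school` are assumed existing Movie/User/School instances,
# and the year filter is an arbitrary example value.
cast = CastMember.objects.filter(movie=movie).select_related('person')
total_billed = PaymentBill.objects.filter(user=some_user).aggregate(total=models.Sum('value'))
enrollments = SchoolEnrollment.objects.filter(school=school, start_date__year=2024)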
class Node(models.Model):
    """
    Name is unique across all resources because it ties a node to values within tiles.
    Recommend prepending resource class to node name.
    """

    nodeid = models.UUIDField(primary_key=True, default=uuid.uuid1)
    name = models.TextField()
    description = models.TextField(blank=True, null=True)
    istopnode = models.BooleanField()
    ontologyclass = models.TextField(blank=True, null=True)
    datatype = models.TextField()
    nodegroup = models.ForeignKey(NodeGroup, db_column='nodegroupid', blank=True, null=True)
    graph = models.ForeignKey(GraphModel, db_column='graphid', blank=True, null=True)
    config = JSONField(blank=True, null=True, db_column='config')
    issearchable = models.BooleanField(default=True)
    isrequired = models.BooleanField(default=False)

    def get_child_nodes_and_edges(self):
        """
        Gather up the child nodes and edges of this node.
        Returns a tuple of (nodes, edges).
        """
        nodes = []
        edges = []
        for edge in Edge.objects.filter(domainnode=self):
            nodes.append(edge.rangenode)
            edges.append(edge)
            child_nodes, child_edges = edge.rangenode.get_child_nodes_and_edges()
            nodes.extend(child_nodes)
            edges.extend(child_edges)
        return (nodes, edges)

    @property
    def is_collector(self):
        return str(self.nodeid) == str(self.nodegroup_id) and self.nodegroup is not None

    def get_relatable_resources(self):
        relatable_resource_ids = [
            r2r.resourceclassfrom
            for r2r in Resource2ResourceConstraint.objects.filter(resourceclassto_id=self.nodeid)
        ]
        relatable_resource_ids = relatable_resource_ids + [
            r2r.resourceclassto
            for r2r in Resource2ResourceConstraint.objects.filter(resourceclassfrom_id=self.nodeid)
        ]
        return relatable_resource_ids

    def set_relatable_resources(self, new_ids):
        old_ids = [res.nodeid for res in self.get_relatable_resources()]
        for old_id in old_ids:
            if old_id not in new_ids:
                Resource2ResourceConstraint.objects.filter(
                    Q(resourceclassto_id=self.nodeid) | Q(resourceclassfrom_id=self.nodeid),
                    Q(resourceclassto_id=old_id) | Q(resourceclassfrom_id=old_id)).delete()
        for new_id in new_ids:
            if new_id not in old_ids:
                new_r2r = Resource2ResourceConstraint.objects.create(
                    resourceclassfrom_id=self.nodeid, resourceclassto_id=new_id)
                new_r2r.save()

    class Meta:
        managed = True
        db_table = 'nodes'
class TrafficSignReal( DecimalValueFromDeviceTypeMixin, SourceControlModel, SoftDeleteModel, UserControlModel, ): id = models.UUIDField(primary_key=True, unique=True, editable=False, default=uuid.uuid4) traffic_sign_plan = models.ForeignKey( TrafficSignPlan, verbose_name=_("Traffic Sign Plan"), on_delete=models.PROTECT, blank=True, null=True, ) location = models.PointField(_("Location (3D)"), dim=3, srid=settings.SRID) height = models.IntegerField(_("Height"), blank=True, null=True) direction = models.IntegerField(_("Direction"), default=0) device_type = models.ForeignKey( TrafficControlDeviceType, verbose_name=_("Device type"), on_delete=models.PROTECT, limit_choices_to=Q( Q(target_model=None) | Q(target_model=DeviceTypeTargetModel.TRAFFIC_SIGN)), blank=False, null=True, ) value = models.DecimalField( _("Traffic Sign Code value"), max_digits=10, decimal_places=2, blank=True, null=True, ) legacy_code = models.CharField(_("Legacy Traffic Sign Code"), max_length=32, blank=True, null=True) txt = models.CharField(_("Txt"), max_length=254, blank=True, null=True) mount_real = models.ForeignKey( MountReal, verbose_name=_("Mount Real"), on_delete=models.PROTECT, blank=True, null=True, ) mount_type = models.ForeignKey( MountType, verbose_name=_("Mount type"), blank=True, null=True, on_delete=models.SET_NULL, ) installation_date = models.DateField(_("Installation date"), blank=True, null=True) installation_status = EnumField( InstallationStatus, verbose_name=_("Installation status"), max_length=10, blank=True, null=True, ) installation_id = models.CharField(_("Installation id"), max_length=254, blank=True, null=True) installation_details = models.CharField(_("Installation details"), max_length=254, blank=True, null=True) permit_decision_id = models.CharField(_("Permit decision id"), max_length=254, blank=True, null=True) validity_period_start = models.DateField(_("Validity period start"), blank=True, null=True) validity_period_end = models.DateField(_("Validity period end"), blank=True, null=True) condition = EnumIntegerField( Condition, verbose_name=_("Condition"), blank=True, null=True, ) coverage_area = models.ForeignKey( CoverageArea, verbose_name=_("Coverage area"), blank=True, null=True, on_delete=models.PROTECT, ) scanned_at = models.DateTimeField(_("Scanned at"), blank=True, null=True) size = EnumField( Size, verbose_name=_("Size"), max_length=1, blank=True, null=True, ) reflection_class = EnumField( Reflection, verbose_name=_("Reflection"), max_length=2, blank=True, null=True, ) surface_class = EnumField( Surface, verbose_name=_("Surface"), max_length=6, blank=True, null=True, ) seasonal_validity_period_start = models.DateField( _("Seasonal validity period start"), blank=True, null=True) seasonal_validity_period_end = models.DateField( _("Seasonal validity period end"), blank=True, null=True) owner = models.ForeignKey( "traffic_control.Owner", verbose_name=_("Owner"), blank=False, null=False, on_delete=models.PROTECT, ) manufacturer = models.CharField(_("Manufacturer"), max_length=254, blank=True, null=True) rfid = models.CharField(_("RFID"), max_length=254, blank=True, null=True) lifecycle = EnumIntegerField(Lifecycle, verbose_name=_("Lifecycle"), default=Lifecycle.ACTIVE) road_name = models.CharField(_("Road name"), max_length=254, blank=True, null=True) lane_number = EnumField(LaneNumber, verbose_name=_("Lane number"), default=LaneNumber.MAIN_1, blank=True) lane_type = EnumField( LaneType, verbose_name=_("Lane type"), default=LaneType.MAIN, blank=True, ) location_specifier = 
EnumIntegerField( LocationSpecifier, verbose_name=_("Location specifier"), default=LocationSpecifier.RIGHT, blank=True, null=True, ) operation = models.CharField(_("Operation"), max_length=64, blank=True, null=True) attachment_url = models.URLField(_("Attachment url"), max_length=500, blank=True, null=True) objects = TrafficSignRealQuerySet.as_manager() class Meta: db_table = "traffic_sign_real" verbose_name = _("Traffic Sign Real") verbose_name_plural = _("Traffic Sign Reals") unique_together = ["source_name", "source_id"] def __str__(self): return f"{self.id} {self.device_type}" def save(self, *args, **kwargs): if self.device_type and not self.device_type.validate_relation( DeviceTypeTargetModel.TRAFFIC_SIGN): raise ValidationError( f'Device type "{self.device_type}" is not allowed for traffic signs' ) if not self.device_type: self.device_type = ( TrafficControlDeviceType.objects.for_target_model( DeviceTypeTargetModel.TRAFFIC_SIGN).filter( legacy_code=self.legacy_code).order_by("code").first()) super().save(*args, **kwargs) def has_additional_signs(self): return self.additional_signs.active().exists() @transaction.atomic def soft_delete(self, user): super().soft_delete(user) self.additional_signs.soft_delete(user)
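# Hedged sketch of the guard in TrafficSignReal.save() and of the cascading
# soft delete above; `sign`, `wrong_type` and `staff_user` are assumed
# existing objects (a sign, a device type targeting another model, a user).
from django.core.exceptions import ValidationError

try:
    sign.device_type = wrong_type
    sign.save()
except ValidationError:
    pass  # save() rejects device types that are not valid for traffic signs

if sign.has_additional_signs():
    sign.soft_delete(staff_user)  # also soft-deletes sign.additional_signs, atomically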
class Resource(models.Model): """Modèle de classe d'une ressource de données.""" class Meta(object): verbose_name = 'Ressource' verbose_name_plural = 'Ressources' # Managers # ======== objects = models.Manager() default = DefaultResourceManager() # Champs atributaires # =================== ckan_id = models.UUIDField( verbose_name='Ckan UUID', default=uuid.uuid4, editable=False, unique=True, ) title = models.TextField( verbose_name='Title', ) description = models.TextField( verbose_name='Description', blank=True, null=True, ) ftp_file = models.FileField( verbose_name='Fichier déposé sur FTP', blank=True, null=True, upload_to=_ftp_file_upload_to, ) referenced_url = models.URLField( verbose_name='Référencer une URL', max_length=2000, blank=True, null=True, ) dl_url = models.URLField( verbose_name='Télécharger depuis une URL', max_length=2000, blank=True, null=True, ) up_file = models.FileField( verbose_name='Téléverser un ou plusieurs fichiers', blank=True, null=True, upload_to=_up_file_upload_to, ) LANG_CHOICES = ( ('french', 'Français'), ('english', 'Anglais'), ('italian', 'Italien'), ('german', 'Allemand'), ('other', 'Autre')) lang = models.CharField( verbose_name='Langue', choices=LANG_CHOICES, default='french', max_length=10, ) format_type = models.ForeignKey( to='ResourceFormats', verbose_name='Format', blank=False, null=True, ) LEVEL_CHOICES = ( ('public', 'Tous les utilisateurs'), ('registered', 'Utilisateurs authentifiés'), ('only_allowed_users', 'Utilisateurs authentifiés avec droits spécifiques'), ('same_organization', 'Utilisateurs de cette organisation uniquement'), ('any_organization', 'Organisations spécifiées'), ) restricted_level = models.CharField( verbose_name="Restriction d'accès", choices=LEVEL_CHOICES, default='public', max_length=20, blank=True, null=True, ) profiles_allowed = models.ManyToManyField( to='Profile', verbose_name='Utilisateurs autorisés', blank=True, ) organisations_allowed = models.ManyToManyField( to='Organisation', verbose_name='Organisations autorisées', blank=True, ) dataset = models.ForeignKey( to='Dataset', verbose_name='Jeu de données', on_delete=models.SET_NULL, blank=True, null=True, ) bbox = models.PolygonField( verbose_name='Rectangle englobant', blank=True, null=True, srid=4171, ) geo_restriction = models.BooleanField( verbose_name='Restriction géographique', default=False, ) extractable = models.BooleanField( verbose_name='Extractible', default=True, ) ogc_services = models.BooleanField( verbose_name='Services OGC', default=True, ) created_on = models.DateTimeField( verbose_name='Date de création de la resource', blank=True, null=True, default=timezone.now, ) last_update = models.DateTimeField( verbose_name='Date de dernière modification de la resource', blank=True, null=True, ) TYPE_CHOICES = ( ('raw', 'Données brutes'), ('annexe', 'Documentation associée'), ('service', 'Service'), ) data_type = models.CharField( verbose_name='Type de la ressource', choices=TYPE_CHOICES, max_length=10, default='raw', ) synchronisation = models.BooleanField( verbose_name='Synchronisation de données distante', default=False, ) EXTRA_FREQUENCY_CHOICES = ( ('5mn', 'Toutes les 5 minutes'), ('15mn', 'Toutes les 15 minutes'), ('20mn', 'Toutes les 20 minutes'), ('30mn', 'Toutes les 30 minutes'), ) FREQUENCY_CHOICES = ( ('1hour', 'Toutes les heures'), ('3hours', 'Toutes les trois heures'), ('6hours', 'Toutes les six heures'), ('daily', 'Quotidienne (tous les jours à minuit)'), ('weekly', 'Hebdomadaire (tous les lundi)'), ('bimonthly', 'Bimensuelle (1er et 15 de 
chaque mois)'), ('monthly', 'Mensuelle (1er de chaque mois)'), ('quarterly', 'Trimestrielle (1er des mois de janvier, avril, juillet, octobre)'), ('biannual', 'Semestrielle (1er janvier et 1er juillet)'), ('annual', 'Annuelle (1er janvier)'), ('never', 'Jamais'), ) sync_frequency = models.CharField( verbose_name='Fréquence de synchronisation', max_length=20, blank=True, null=True, choices=FREQUENCY_CHOICES + EXTRA_FREQUENCY_CHOICES, default='never', ) crs = models.ForeignKey( to='SupportedCrs', verbose_name='CRS', on_delete=models.SET_NULL, blank=True, null=True, ) def __str__(self): return self.title # Propriétés # ========== _encoding = 'utf-8' @property def encoding(self): return self._encoding @encoding.setter def encoding(self, value): if value: self._encoding = value @property def filename(self): if self.ftp_file: return self.ftp_file.name if self.up_file: return self.up_file.name return '{}.{}'.format(slugify(self.title), self.format.lower()) @property def ckan_url(self): return urljoin(settings.CKAN_URL, 'dataset/{}/resource/{}/'.format( self.dataset.slug, self.ckan_id)) @property def api_location(self): kwargs = {'dataset_name': self.dataset.slug, 'resource_id': self.ckan_id} return reverse('api:resource_show', kwargs=kwargs) @property def title_overflow(self): return three_suspension_points(self.title) @property def anonymous_access(self): return self.restricted_level == 'public' @property def is_datagis(self): return self.get_layers() and True or False # Méthodes héritées # ================= def save(self, *args, current_user=None, synchronize=False, file_extras=None, skip_download=False, **kwargs): if 'update_fields' in kwargs: return super().save(*args, **kwargs) # Version précédante de la ressource (avant modification) previous, created = self.pk \ and (Resource.objects.get(pk=self.pk), False) or (None, True) if previous: # crs est immuable sauf si le jeu de données change (Cf. plus bas) self.crs = previous.crs # Quelques valeur par défaut à la création de l'instance if created or not ( # Ou si l'éditeur n'est pas partenaire du CRIGE current_user and current_user.profile.crige_membership): # Mais seulement s'il s'agit de données SIG, sauf # qu'on ne le sait pas encore... self.geo_restriction = False self.ogc_services = True self.extractable = True # La restriction au territoire de compétence désactive toujours les services OGC if self.geo_restriction: self.ogc_services = False self.last_update = timezone.now() if created: super().save(*args, **kwargs) kwargs['force_insert'] = False # Quelques contrôles sur les fichiers de données téléversée ou à télécharger filename = False content_type = None file_must_be_deleted = False # permet d'indiquer si les fichiers doivent être supprimés à la fin de la chaine de traitement publish_raw_resource = True # permet d'indiquer si les ressources brutes sont publiées dans CKAN if self.ftp_file and not skip_download: filename = self.ftp_file.file.name # Si la taille de fichier dépasse la limite autorisée, # on traite les données en fonction du type détecté if self.ftp_file.size > DOWNLOAD_SIZE_LIMIT: extension = self.format_type.extension.lower() if self.format_type.is_gis_format: try: gdalogr_obj = get_gdalogr_object(filename, extension) except NotDataGISError: # On essaye de traiter le jeux de données normalement, même si ça peut être long. 
pass else: if gdalogr_obj.__class__.__name__ == 'GdalOpener': s0 = str(self.ckan_id) s1, s2, s3 = s0[:3], s0[3:6], s0[6:] dir = os.path.join(CKAN_STORAGE_PATH, s1, s2) os.makedirs(dir, mode=0o777, exist_ok=True) shutil.copyfile(filename, os.path.join(dir, s3)) src = os.path.join(dir, s3) dst = os.path.join(dir, filename.split('/')[-1]) try: os.symlink(src, dst) except FileNotFoundError as e: logger.error(e) else: logger.debug('Created a symbolic link {dst} pointing to {src}.'.format(dst=dst, src=src)) # if gdalogr_obj.__class__.__name__ == 'OgrOpener': # On ne publie que le service OGC dans CKAN publish_raw_resource = False elif (self.up_file and file_extras): # GDAL/OGR ne semble pas prendre de fichier en mémoire.. # ..à vérifier mais si c'est possible comment indiquer le vsi en préfixe du filename ? filename = self.up_file.path self.save(update_fields=('up_file',)) file_must_be_deleted = True elif self.dl_url and not skip_download: try: directory, filename, content_type = download( self.dl_url, settings.MEDIA_ROOT, max_size=DOWNLOAD_SIZE_LIMIT) except SizeLimitExceededError as e: l = len(str(e.max_size)) if l > 6: m = '{0} mo'.format(Decimal(int(e.max_size) / 1024 / 1024)) elif l > 3: m = '{0} ko'.format(Decimal(int(e.max_size) / 1024)) else: m = '{0} octets'.format(int(e.max_size)) raise ValidationError(( 'La taille du fichier dépasse ' 'la limite autorisée : {0}.').format(m), code='dl_url') except Exception as e: if e.__class__.__name__ == 'HTTPError': if e.response.status_code == 404: msg = ('La ressource distante ne semble pas exister. ' "Assurez-vous que l'URL soit correcte.") if e.response.status_code == 403: msg = ("Vous n'avez pas l'autorisation pour " 'accéder à la ressource.') if e.response.status_code == 401: msg = ('Une authentification est nécessaire ' 'pour accéder à la ressource.') else: msg = 'Le téléchargement du fichier a échoué.' raise ValidationError(msg, code='dl_url') file_must_be_deleted = True # Synchronisation avec CKAN # ========================= # La synchronisation doit s'effectuer avant la publication des # éventuelles couches de données SIG car dans le cas des données # de type « raster », nous utilisons le filestore de CKAN. if synchronize and publish_raw_resource: self.synchronize( content_type=content_type, file_extras=file_extras, filename=filename, with_user=current_user) elif synchronize and not publish_raw_resource: url = reduce(urljoin, [ settings.CKAN_URL, 'dataset/', str(self.dataset.ckan_id) + '/', 'resource/', str(self.ckan_id) + '/', 'download/', Path(self.ftp_file.name).name]) self.synchronize(url=url, with_user=current_user) # Détection des données SIG # ========================= if filename: # On vérifie s'il s'agit de données SIG, uniquement pour # les extensions de fichier autorisées.. extension = self.format_type.extension.lower() if self.format_type.is_gis_format: # Si c'est le cas, on monte les données dans la base PostGIS dédiée # et on déclare la couche au service OGC:WxS de l'organisation. # Mais d'abord, on vérifie si la ressource contient # déjà des « Layers », auquel cas il faudra vérifier si # la table de données a changée. existing_layers = {} if not created: existing_layers = dict(( re.sub('^(\w+)_[a-z0-9]{7}$', '\g<1>', layer.name), layer.name) for layer in self.get_layers()) try: # C'est carrément moche mais c'est pour aller vite. # Il faudrait factoriser tout ce bazar et créer # un décorateur pour gérer le rool-back sur CKAN. 
try: gdalogr_obj = get_gdalogr_object(filename, extension) except NotDataGISError: tables = [] pass else: try: self.format_type = ResourceFormats.objects.get( extension=extension, ckan_format=gdalogr_obj.format) # except ResourceFormats.MultipleObjectsReturned: # pass except Exception: pass # ========================== # Jeu de données vectorielle # ========================== if gdalogr_obj.__class__.__name__ == 'OgrOpener': # On convertit les données vers PostGIS try: tables = ogr2postgis( gdalogr_obj, update=existing_layers, epsg=self.crs and self.crs.auth_code or None, encoding=self.encoding) except NotOGRError as e: logger.warning(e) file_must_be_deleted and remove_file(filename) msg = ( "Le fichier reçu n'est pas reconnu " 'comme étant un jeu de données SIG correct.') raise ValidationError(msg, code='__all__') except DataDecodingError as e: logger.warning(e) file_must_be_deleted and remove_file(filename) msg = ( 'Impossible de décoder correctement les ' "données. Merci d'indiquer l'encodage " 'ci-dessous.') raise ValidationError(msg, code='encoding') except WrongDataError as e: logger.warning(e) file_must_be_deleted and remove_file(filename) msg = ( 'Votre ressource contient des données SIG que ' 'nous ne parvenons pas à lire correctement. ' 'Un ou plusieurs objets sont erronés.') raise ValidationError(msg) except NotFoundSrsError as e: logger.warning(e) file_must_be_deleted and remove_file(filename) msg = ( 'Votre ressource semble contenir des données SIG ' 'mais nous ne parvenons pas à détecter le système ' 'de coordonnées. Merci de sélectionner le code du ' 'CRS dans la liste ci-dessous.') raise ValidationError(msg, code='crs') except NotSupportedSrsError as e: logger.warning(e) file_must_be_deleted and remove_file(filename) msg = ( 'Votre ressource semble contenir des données SIG ' 'mais le système de coordonnées de celles-ci ' "n'est pas supporté par l'application.") raise ValidationError(msg, code='__all__') except ExceedsMaximumLayerNumberFixedError as e: logger.warning(e) file_must_be_deleted and remove_file(filename) raise ValidationError(e.__str__(), code='__all__') else: # Ensuite, pour tous les jeux de données SIG trouvés, # on crée le service ows à travers la création de `Layer` try: Layer = apps.get_model(app_label='idgo_admin', model_name='Layer') for table in tables: try: Layer.objects.get( name=table['id'], resource=self) except Layer.DoesNotExist: save_opts = {'synchronize': synchronize} Layer.vector.create( bbox=table['bbox'], name=table['id'], resource=self, save_opts=save_opts) except Exception as e: logger.error(e) file_must_be_deleted and remove_file(filename) for table in tables: drop_table(table['id']) raise e # ========================== # Jeu de données matricielle # ========================== if gdalogr_obj.__class__.__name__ == 'GdalOpener': coverage = gdalogr_obj.get_coverage() try: tables = [gdalinfo( coverage, update=existing_layers, epsg=self.crs and self.crs.auth_code or None)] except NotFoundSrsError as e: logger.warning(e) file_must_be_deleted and remove_file(filename) msg = ( 'Votre ressource semble contenir des données SIG ' 'mais nous ne parvenons pas à détecter le système ' 'de coordonnées. 
Merci de sélectionner le code du ' 'CRS dans la liste ci-dessous.') raise ValidationError(msg, code='crs') except NotSupportedSrsError as e: logger.warning(e) file_must_be_deleted and remove_file(filename) msg = ( 'Votre ressource semble contenir des données SIG ' 'mais le système de coordonnées de celles-ci ' "n'est pas supporté par l'application.") raise ValidationError(msg, code='__all__') # Super Crado Code s0 = str(self.ckan_id) s1, s2, s3 = s0[:3], s0[3:6], s0[6:] dir = os.path.join(CKAN_STORAGE_PATH, s1, s2) src = os.path.join(dir, s3) dst = os.path.join(dir, filename.split('/')[-1]) try: os.symlink(src, dst) except FileExistsError as e: logger.warning(e) except FileNotFoundError as e: logger.error(e) else: logger.debug('Created a symbolic link {dst} pointing to {src}.'.format(dst=dst, src=src)) try: Layer = apps.get_model(app_label='idgo_admin', model_name='Layer') for table in tables: try: Layer.objects.get( name=table['id'], resource=self) except Layer.DoesNotExist: Layer.raster.create( bbox=table['bbox'], name=table['id'], resource=self) except Exception as e: logger.error(e) file_must_be_deleted and remove_file(filename) raise e except Exception as e: if created: if current_user: username = current_user.username apikey = CkanHandler.get_user(username)['apikey'] with CkanUserHandler(apikey) as ckan: ckan.delete_resource(str(self.ckan_id)) else: CkanHandler.delete_resource(str(self.ckan_id)) for layer in self.get_layers(): layer.delete(current_user=current_user) # Puis on « raise » l'erreur raise e # On met à jour les champs de la ressource SupportedCrs = apps.get_model(app_label='idgo_admin', model_name='SupportedCrs') crs = [ SupportedCrs.objects.get( auth_name='EPSG', auth_code=table['epsg']) for table in tables] # On prend la première valeur (c'est moche) self.crs = crs and crs[0] or None # Si les données changent.. if existing_layers and \ set(previous.get_layers()) != set(self.get_layers()): # on supprime les anciens `layers`.. for layer in previous.get_layers(): layer.delete() #### if self.get_layers(): extent = self.get_layers().aggregate(models.Extent('bbox')).get('bbox__extent') if extent: xmin, ymin = extent[0], extent[1] xmax, ymax = extent[2], extent[3] setattr(self, 'bbox', bounds_to_wkt(xmin, ymin, xmax, ymax)) else: # Si la ressource n'est pas de type SIG, on passe les trois arguments # qui concernent exclusivement ces dernières à « False ». self.geo_restriction = False self.ogc_services = False self.extractable = False super().save(*args, **kwargs) # Puis dans tous les cas.. # on met à jour le statut des couches du service cartographique.. if not created: self.update_enable_layers_status() # on supprime les données téléversées ou téléchargées.. 
if file_must_be_deleted: remove_file(filename) # [Crado] on met à jour la ressource CKAN if synchronize: CkanHandler.update_resource( str(self.ckan_id), extracting_service=str(self.extractable)) for layer in self.get_layers(): layer.save(synchronize=synchronize) self.dataset.date_modification = timezone.now().date() self.dataset.save(current_user=None, synchronize=True, update_fields=['date_modification']) def delete(self, *args, current_user=None, **kwargs): with_user = current_user for layer in self.get_layers(): layer.delete(current_user=current_user) # On supprime la ressource CKAN ckan_id = str(self.ckan_id) if with_user: username = with_user.username apikey = CkanHandler.get_user(username)['apikey'] with CkanUserHandler(apikey=apikey) as ckan_user: ckan_user.delete_resource(ckan_id) else: CkanHandler.delete_resource(ckan_id) # On supprime l'instance super().delete(*args, **kwargs) self.dataset.date_modification = timezone.now().date() self.dataset.save(current_user=current_user, synchronize=True, update_fields=['date_modification']) # Autres méthodes # =============== def synchronize(self, url=None, filename=None, content_type=None, file_extras=None, with_user=None): """Synchronizer le jeu de données avec l'instance de CKAN.""" # Identifiant de la resource CKAN : id = str(self.ckan_id) # Définition des propriétés du « package » : data = { 'crs': self.crs and self.crs.description or '', 'name': self.title, 'description': self.description, 'data_type': self.data_type, 'extracting_service': 'False', # I <3 CKAN 'format': self.format_type and self.format_type.ckan_format, 'view_type': self.format_type and self.format_type.ckan_view, 'id': id, 'lang': self.lang, 'restricted_by_jurisdiction': str(self.geo_restriction), 'url': url and url or '', 'api': '{}'} # TODO: Factoriser # (0) Aucune restriction if self.restricted_level == 'public': restricted = json.dumps({'level': 'public'}) # (1) Uniquement pour un utilisateur connecté elif self.restricted_level == 'registered': restricted = json.dumps({'level': 'registered'}) # (2) Seulement les utilisateurs indiquées elif self.restricted_level == 'only_allowed_users': restricted = json.dumps({ 'allowed_users': ','.join( self.profiles_allowed.exists() and [ p.user.username for p in self.profiles_allowed.all()] or []), 'level': 'only_allowed_users'}) # (3) Les utilisateurs de cette organisation elif self.restricted_level == 'same_organization': restricted = json.dumps({ 'allowed_users': ','.join( get_all_users_for_organisations( self.organisations_allowed.all())), 'level': 'only_allowed_users'}) # (3) Les utilisateurs des organisations indiquées elif self.restricted_level == 'any_organization': restricted = json.dumps({ 'allowed_users': ','.join( get_all_users_for_organisations( self.organisations_allowed.all())), 'level': 'only_allowed_users'}) data['restricted'] = restricted if self.referenced_url: data['url'] = self.referenced_url if self.dl_url and filename: downloaded_file = File(open(filename, 'rb')) data['upload'] = downloaded_file data['size'] = downloaded_file.size data['mimetype'] = content_type if self.up_file and file_extras: data['upload'] = self.up_file.file data['size'] = file_extras.get('size') data['mimetype'] = file_extras.get('mimetype') if self.ftp_file: if not url: data['upload'] = self.ftp_file.file data['size'] = self.ftp_file.size if self.format_type and len(self.format_type.mimetype): data['mimetype'] = self.format_type.mimetype[0] else: data['mimetype'] = 'text/plain' # Passe dans un validateur pour forcer en base : 
url_type='upload' data['force_url_type'] = 'upload' if self.data_type == 'raw': if self.ftp_file or self.dl_url or self.up_file: data['resource_type'] = 'file.upload' elif self.referenced_url: data['resource_type'] = 'file' if self.data_type == 'annexe': data['resource_type'] = 'documentation' if self.data_type == 'service': data['resource_type'] = 'api' ckan_package = CkanHandler.get_package(str(self.dataset.ckan_id)) if with_user: username = with_user.username apikey = CkanHandler.get_user(username)['apikey'] with CkanUserHandler(apikey=apikey) as ckan: ckan.publish_resource(ckan_package, **data) else: return CkanHandler.publish_resource(ckan_package, **data) def get_layers(self, **kwargs): Layer = apps.get_model(app_label='idgo_admin', model_name='Layer') return Layer.objects.filter(resource=self, **kwargs) def update_enable_layers_status(self): for layer in self.get_layers(): layer.handle_enable_ows_status() def is_profile_authorized(self, user): Profile = apps.get_model(app_label='idgo_admin', model_name='Profile') if not user.pk: raise IntegrityError('User does not exists') if self.restricted_level == 'only_allowed_users' and self.profiles_allowed.exists(): return user in [ p.user for p in self.profiles_allowed.all()] elif self.restricted_level in ('same_organization', 'any_organization') and self.organisations_allowed.exists(): return user in [p.user for p in Profile.objects.filter( organisation__in=self.organisations_allowed.all(), organisation__is_active=True)] return True
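# Hedged sketch of the access check and layer lookup exposed by the Resource
# model above; `resource` and `request` are assumed to come from a view context.
from django.core.exceptions import PermissionDenied

if not resource.is_profile_authorized(request.user):
    raise PermissionDenied
layers = resource.get_layers()  # idgo_admin Layer objects attached to this resource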
class Presentation(models.Model):
    '''GP prescribing products. Import from BNF codes file from BSA.
    ADQs imported from BSA data.

    Where codes have changed or otherwise been mapped, the `replaced_by`
    field has a value.
    '''
    bnf_code = models.CharField(max_length=15, primary_key=True,
                                validators=[isAlphaNumeric])
    name = models.CharField(max_length=200)
    is_generic = models.NullBooleanField(default=None)
    active_quantity = models.FloatField(null=True, blank=True)
    adq = models.FloatField(null=True, blank=True)
    adq_unit = models.CharField(max_length=10, null=True, blank=True)
    is_current = models.BooleanField(default=True)
    percent_of_adq = models.FloatField(null=True, blank=True)
    replaced_by = models.ForeignKey('self', null=True, blank=True)
    objects = PresentationManager()

    def __str__(self):
        return '%s: %s' % (self.bnf_code, self.product_name)

    def save(self, *args, **kwargs):
        if len(self.bnf_code) > 10:
            code = self.bnf_code[9:11]
            is_generic = (code == 'AA')
        else:
            is_generic = None
        self.is_generic = is_generic
        super(Presentation, self).save(*args, **kwargs)

    @property
    def current_version(self):
        """BNF codes are replaced over time.

        Return the most recent version of the code.
        """
        version = self
        next_version = self.replaced_by
        seen = []
        while next_version:
            if next_version in seen:
                break  # avoid loops
            else:
                seen.append(next_version)
            version = next_version
            next_version = version.replaced_by
        return version

    @property
    def dmd_product(self):
        if self.is_generic:
            concept_class = 1
        else:
            concept_class = 2
        # Sometimes we get more than one DMD+D VMP for a single BNF
        # code. This is usually where something is available in a
        # suspension or a solution, or similar clinical equivalencies,
        # so we just pick the first one.
        return DMDProduct.objects.filter(
            bnf_code=self.bnf_code,
            concept_class=concept_class).first()

    @property
    def product_name(self):
        if self.dmd_product:
            name = self.dmd_product.name
        else:
            try:
                name = Presentation.objects.get(bnf_code=self.bnf_code).name
            except Presentation.DoesNotExist:
                name = "n/a"
        return name

    class Meta:
        app_label = 'frontend'
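# Hedged example of resolving a superseded BNF code through the `replaced_by`
# chain above; the code value is purely illustrative.
p = Presentation.objects.get(bnf_code='0212000AAAAAAAA')
latest = p.current_version          # follows replaced_by links, guarding against loops
print(latest.bnf_code, latest.product_name)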
class Trip(WithGalleryMixin, models.Model): """Jízdy""" DIRECTIONS = [ ("trip_to", _("Tam")), ("trip_from", _("Zpět")), ("recreational", _("Výlet")), ] DIRECTIONS_DICT = dict(DIRECTIONS) class Meta: verbose_name = _("Jízda") verbose_name_plural = _("Jízdy") unique_together = (("user_attendance", "date", "direction"),) ordering = ("date", "-direction") indexes = [ models.Index(fields=["date", "-direction", "-id", "source_application"]), ] objects = BulkUpdateManager() user_attendance = models.ForeignKey( "UserAttendance", related_name="user_trips", null=True, blank=False, default=None, on_delete=models.CASCADE, ) direction = models.CharField( verbose_name=_("Směr cesty"), choices=DIRECTIONS, max_length=20, default=None, null=False, blank=False, ) date = models.DateField( verbose_name=_("Datum cesty"), default=datetime.date.today, null=False, ) commute_mode = models.ForeignKey( "CommuteMode", verbose_name=_("Dopravní prostředek"), on_delete=models.CASCADE, default=1, null=False, blank=False, ) track = models.MultiLineStringField( verbose_name=_("trasa"), help_text=MAP_DESCRIPTION, srid=4326, null=True, blank=True, geography=True, ) gpx_file = models.FileField( verbose_name=_("GPX soubor"), help_text=_( mark_safe( "Zadat trasu nahráním souboru GPX. " "Pro vytvoření GPX souboru s trasou můžete použít vyhledávání na naší " "<a href='https://mapa.prahounakole.cz/#hledani' target='_blank'>mapě</a>." ), ), upload_to=normalize_gpx_filename, blank=True, null=True, max_length=512, ) distance = models.FloatField( verbose_name=_("Ujetá vzdálenost (km)"), null=True, blank=True, default=None, validators=[ MaxValueValidator(1000), MinValueValidator(0), ], ) duration = models.PositiveIntegerField( verbose_name=_("Doba v sekundách"), null=True, blank=True, ) from_application = models.BooleanField( verbose_name=_("Nahráno z aplikace"), default=False, null=False, ) source_application = models.CharField( verbose_name=_("Zdrojová aplikace"), max_length=255, null=True, blank=True, ) source_id = models.CharField( verbose_name=_("Identifikátor v původní aplikaci"), max_length=255, null=True, blank=True, ) created = models.DateTimeField( verbose_name=_("Datum vytvoření"), auto_now_add=True, null=True, ) updated = models.DateTimeField( verbose_name=_("Datum poslední změny"), auto_now=True, null=True, ) gallery = models.ForeignKey( "photologue.Gallery", verbose_name=_("Galerie fotek"), null=True, blank=True, on_delete=models.CASCADE, ) description = models.TextField( verbose_name=_("Popis cesty"), default="", null=False, blank=True, ) def active(self): return self.user_attendance.campaign.day_active(self.date) def get_commute_mode_display(self): if self.commute_mode.slug == "no_work" and self.date > util.today(): return _("Dovolená") return str(self.commute_mode) def get_direction_display(self): return self.DIRECTIONS_DICT[self.direction] def duration_minutes(self): if self.duration is None: return 0 return self.duration / 60 def get_application_link(self): app_links = { "strava": "https://www.strava.com/", "urbancyclers": "https://play.google.com/store/apps/details?id=com.umotional.bikeapp", "SuperLife": "https://play.google.com/store/apps/details?id=cz.cncenter.superlife", } if self.source_application in app_links: return app_links[self.source_application] def get_trip_link(self): if self.source_application == "strava": return "<a href='%sactivities/%s'>View on Strava</a>" % ( self.get_application_link(), self.source_id, ) return ""
class Medium(models.Model):
    objects = models.Manager()  # Helps PyCharm CE auto-completion

    PHOTO = 'P'
    VIDEO = 'V'
    MEDIUM_TYPES = ((PHOTO, 'Photo'), (VIDEO, 'Video'))

    file = models.ForeignKey(File, null=True, blank=True, on_delete=models.PROTECT)
    location = models.PointField(
        null=True, blank=True,
        help_text='Location where the photo/video was taken')
    height = models.IntegerField(null=True, blank=True, help_text='Height of the image/video')
    width = models.IntegerField(null=True, blank=True, help_text='Width of the image/video')
    datetime_taken = models.DateTimeField(null=True, blank=True)
    datetime_imported = models.DateTimeField()
    public = models.BooleanField(default=False)
    photographer = models.ForeignKey(Photographer, null=True, on_delete=models.PROTECT)
    license = models.ForeignKey(License, null=True, blank=True, on_delete=models.PROTECT)
    copyright = models.ForeignKey(Copyright, null=True, blank=True, on_delete=models.PROTECT)
    tags = models.ManyToManyField(Tag, blank=True)
    medium_type = models.CharField(max_length=1, choices=MEDIUM_TYPES)
    duration = models.IntegerField(
        null=True, blank=True,
        help_text='Duration of the videos, None for photos')

    def latitude(self):
        if self.location is None:
            return None
        return self.location.y

    def longitude(self):
        if self.location is None:
            return None
        return self.location.x

    def preview(self):
        from main.medium_for_view import MediumForView

        medium_for_view: MediumForView = MediumForView.objects.get(id=self.pk)

        if medium_for_view.medium_type == Medium.PHOTO:
            return mark_safe('<img src="{}">'.format(
                escape(medium_for_view.thumbnail_url())))
        elif medium_for_view.medium_type == Medium.VIDEO:
            return mark_safe('''<video controls>
                <source src='{}' type="{}">
                </video>
                '''.format(escape(medium_for_view.thumbnail_url()),
                           escape(medium_for_view.thumbnail_content_type())))
        else:
            assert False

    def update_fields(self, fields):
        set_fields(self, fields)

    def get_absolute_url(self):
        return reverse('medium', args=[str(self.pk)])

    # def delete(self, **kwargs):
    #     See DeleteMedium class in delete_medium.py for deletion of a medium and its dependencies
    #     pass

    def __str__(self):
        return '{}'.format(self.pk)

    class Meta:
        verbose_name_plural = 'Media'
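# Illustrative use of the read-only helpers on Medium above; the filter is an
# arbitrary example query, not part of the model's API.
medium = Medium.objects.filter(medium_type=Medium.PHOTO, public=True).first()
if medium is not None:
    print(medium.latitude(), medium.longitude())  # both None when no location is stored
    print(medium.get_absolute_url())              # resolves the 'medium' URL pattern for this pk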
class AccountActions(models.Model): key = models.UUIDField( verbose_name="Clé", editable=False, default=uuid.uuid4, ) ACTION_CHOICES = ( ('confirm_mail', "Confirmation de l'e-mail par l'utilisateur"), ('confirm_new_organisation', "Confirmation par un administrateur de la création d'une organisation par l'utilisateur" ), ('confirm_rattachement', "Rattachement d'un utilisateur à une organisation par un administrateur" ), ('confirm_referent', "Confirmation du rôle de réferent d'une organisation pour un utilisateur par un administrateur" ), ('confirm_contribution', "Confirmation du rôle de contributeur d'une organisation pour un utilisateur par un administrateur" ), ('reset_password', "Réinitialisation du mot de passe"), ('set_password_admin', "Initialisation du mot de passe suite à une inscription par un administrateur" ), ('created_organisation_through_api', "Création d'une organisation depuis l'API"), ) action = models.CharField( verbose_name="Action de gestion de profile", max_length=250, blank=True, null=True, choices=ACTION_CHOICES, default='confirm_mail', ) created_on = models.DateTimeField( verbose_name="Date de création", blank=True, null=True, auto_now_add=True, ) closed = models.DateTimeField( verbose_name="Date de validation", blank=True, null=True, ) profile = models.ForeignKey( to='Profile', on_delete=models.CASCADE, blank=True, null=True, ) # Pour pouvoir reutiliser AccountActions pour demandes post-inscription organisation = models.ForeignKey( to='Organisation', verbose_name="Organisation", blank=True, null=True, on_delete=models.CASCADE, ) def orga_name(self): return self.organisation and str(self.organisation.legal_name) or 'N/A' orga_name.short_description = "Nom de l'organisation concernée" def get_path(self): action = { 'confirm_mail': ( 'confirmation_mail', { 'key': self.key }, ), 'confirm_new_organisation': ( 'confirm_new_orga', { 'key': self.key }, ), 'confirm_rattachement': ( 'confirm_rattachement', { 'key': self.key }, ), 'confirm_referent': ( 'confirm_referent', { 'key': self.key }, ), 'confirm_contribution': ( 'confirm_contribution', { 'key': self.key }, ), 'reset_password': ( 'password_manager', { 'key': self.key, 'process': 'reset' }, ), 'set_password_admin': ( 'password_manager', { 'key': self.key, 'process': 'initiate' }, ), } return reverse( 'idgo_admin:{action}'.format(action=action[self.action][0]), kwargs=action[self.action][1]) get_path.short_description = "Adresse de validation"
class Profile(models.Model):
    # TODO: Extend the User class with Profile

    class Meta(object):
        verbose_name = "Profil utilisateur"
        verbose_name_plural = "Profils des utilisateurs"

    user = models.OneToOneField(User, on_delete=models.CASCADE)
    organisation = models.ForeignKey(
        to='Organisation',
        verbose_name="Organisation d'appartenance",
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
    )
    referents = models.ManyToManyField(
        to='Organisation',
        through='LiaisonsReferents',
        related_name='profile_referents',
        verbose_name="Organisation dont l'utilisateur est réferent",
    )
    contributions = models.ManyToManyField(
        to='Organisation',
        through='LiaisonsContributeurs',
        related_name='profile_contributions',
        verbose_name="Organisation dont l'utilisateur est contributeur",
    )
    phone = models.CharField(
        verbose_name="Téléphone",
        max_length=10,
        blank=True,
        null=True,
    )
    is_active = models.BooleanField(
        verbose_name="Validation suite à confirmation par e-mail",
        default=False,
    )
    membership = models.BooleanField(
        verbose_name="Utilisateur rattaché à une organisation",
        default=False,
    )
    crige_membership = models.BooleanField(
        verbose_name="Utilisateur partenaire IDGO",
        default=False,
    )
    is_admin = models.BooleanField(
        verbose_name="Administrateur métier",
        default=False,
    )
    sftp_password = models.CharField(
        verbose_name="Mot de passe sFTP",
        default="CHANGEME",
        max_length=10,
        blank=True,
        null=True,
    )

    def __str__(self):
        return "{} ({})".format(self.user.get_full_name(), self.user.username)

    # Properties
    # ==========

    @property
    def is_agree_with_terms(self):
        Gdpr = apps.get_model(app_label='idgo_admin', model_name='Gdpr')
        GdprUser = apps.get_model(app_label='idgo_admin', model_name='GdprUser')
        try:
            GdprUser.objects.get(user=self.user, gdpr=Gdpr.objects.latest('issue_date'))
        except GdprUser.DoesNotExist:
            return False
        else:
            return True

    @property
    def is_referent(self):
        kwargs = {'profile': self, 'validated_on__isnull': False}
        return LiaisonsReferents.objects.filter(**kwargs).exists()

    @property
    def referent_for(self):
        return LiaisonsReferents.get_subordinated_organisations(profile=self)

    @property
    def strict_referent_for(self):
        # Temporary (to be removed)
        kwargs = {'profile': self, 'validated_on__isnull': False}
        return [
            e.organisation
            for e in LiaisonsReferents.objects.filter(**kwargs)
        ]

    @property
    def is_contributor(self):
        kwargs = {'profile': self, 'validated_on__isnull': False}
        return LiaisonsContributeurs.objects.filter(**kwargs).exists()

    @property
    def contribute_for(self):
        return LiaisonsContributeurs.get_contribs(profile=self)

    @property
    def is_idgo_admin(self):
        return self.is_admin and self.crige_membership

    @property
    def is_idgo_partner(self):
        return self.crige_membership

    @property
    def is_ftp_account_exists(self):
        if ENABLE_FTP_INHERENT:
            return True
        return bool(self.sftp_password)

    @property
    def api_location(self):
        kwargs = {'username': self.user.username}
        return reverse('api:user_show', kwargs=kwargs)

    # Class methods
    # =============

    @classmethod
    def get_idgo_membership(cls):
        return cls.objects.filter(is_active=True, crige_membership=True)

    # Other methods
    # =============

    def get_roles(self, organisation=None, dataset=None):
        if organisation:
            is_referent = LiaisonsReferents.objects.filter(
                profile=self,
                organisation=organisation,
                validated_on__isnull=False).exists()
        else:
            is_referent = LiaisonsReferents.objects.filter(
                profile=self,
                validated_on__isnull=False).exists()
        return {
            'is_admin': self.is_admin,
            'is_referent': is_referent,
            'is_editor': (self.user == dataset.editor) if dataset else False,
        }

    def is_referent_for(self, organisation):
        kwargs = {
            'organisation': organisation,
            'profile': self,
            'validated_on__isnull': False,
        }
        return LiaisonsReferents.objects.filter(**kwargs).exists()

    @property
    def awaiting_member_status(self):
        try:
            action = AccountActions.objects.get(
                action='confirm_rattachement', profile=self, closed__isnull=True)
        except Exception:
            return
        return action.organisation

    @property
    def awaiting_contributor_status(self):
        return LiaisonsContributeurs.get_pending(profile=self)

    @property
    def awaiting_referent_statut(self):
        return LiaisonsReferents.get_pending(profile=self)

    # Actions on the sFTP account

    @property
    def change_password_allowed(self):
        """The password can only be changed at WL (json_file mechanism)."""
        return FTP_MECHANISM == "json_file"

    def create_ftp_account(self, change=False):
        mechanism = FTP_MECHANISM
        user_prefix = FTP_USER_PREFIX
        if mechanism == 'cgi':
            params = {
                'action': 'create',
                'login': user_prefix + self.user.username,
            }
            r = requests.get(FTP_SERVICE_URL, params=params)
            if r.status_code == 200:
                details = r.json()
                self.sftp_password = details.get('message')
                self.save()
        elif mechanism == "json_file":
            operation = "modify" if change else "create"
            password = sftp_user_operation(operation, user_prefix + self.user.username)
            self.sftp_password = "******"
            self.save()
            return password

    def delete_ftp_account(self):
        mechanism = FTP_MECHANISM
        user_prefix = FTP_USER_PREFIX
        if mechanism == 'cgi':
            params = {
                'action': 'delete',
                'login': user_prefix + self.user.username,
            }
            r = requests.get(FTP_SERVICE_URL, params=params)
            if r.status_code == 200:
                self.sftp_password = None
                self.save()
        elif mechanism == "json_file":
            sftp_user_operation("delete", user_prefix + self.user.username)
            self.sftp_password = None
            self.save()
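# --- Illustrative usage (not part of the model) -------------------------------
# A hedged sketch of how the role helpers above are typically combined by a
# view; the `can_publish_for` helper and its exact policy are assumptions made
# for illustration only.
def can_publish_for(profile, organisation):
    """Return True when the profile may publish datasets for the organisation."""
    roles = profile.get_roles(organisation=organisation)
    return (
        roles['is_admin']
        or roles['is_referent']
        or organisation in profile.contribute_for
    )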
class LiaisonsContributeurs(models.Model):

    class Meta(object):
        verbose_name = "Statut de contributeur"
        verbose_name_plural = "Statuts de contributeur"
        unique_together = (('profile', 'organisation'), )

    profile = models.ForeignKey(
        to='Profile',
        verbose_name="Profil utilisateur",
        on_delete=models.CASCADE,
    )
    organisation = models.ForeignKey(
        to='Organisation',
        verbose_name="Organisation",
        on_delete=models.CASCADE,
    )
    created_on = models.DateField(
        verbose_name="Date de la demande de statut de contributeur",
        auto_now_add=True,
    )
    validated_on = models.DateField(
        verbose_name="Date de la confirmation par un administrateur",
        blank=True,
        null=True,
    )

    def __str__(self):
        return '{full_name} ({username})--{organisation}'.format(
            full_name=self.profile.user.get_full_name(),
            username=self.profile.user.username,
            organisation=self.organisation.legal_name,
        )

    # Class methods
    # =============

    @classmethod
    def get_contribs(cls, profile):
        kwargs = {'profile': profile, 'validated_on__isnull': False}
        return [
            e.organisation
            for e in LiaisonsContributeurs.objects.filter(**kwargs)
        ]

    @classmethod
    def get_contributors(cls, organisation):
        kwargs = {'organisation': organisation, 'validated_on__isnull': False}
        return [
            e.profile
            for e in LiaisonsContributeurs.objects.filter(**kwargs)
        ]

    @classmethod
    def get_pending(cls, profile):
        kwargs = {'profile': profile, 'validated_on': None}
        return [
            e.organisation
            for e in LiaisonsContributeurs.objects.filter(**kwargs)
        ]
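# --- Illustrative usage (not part of the model) -------------------------------
# Hedged sketch of how a pending contributor request is approved: a liaison is
# "pending" while validated_on is NULL and becomes active once the date is set.
# The helper name and workflow below are assumptions for illustration.
from django.utils import timezone

def approve_contribution(profile, organisation):
    liaison = LiaisonsContributeurs.objects.get(
        profile=profile, organisation=organisation, validated_on__isnull=True)
    liaison.validated_on = timezone.now().date()
    liaison.save()
    # The organisation now shows up in get_contribs() and no longer in get_pending().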
class MetricsIngest(models.Model): """Tracks all the ingest metrics grouped by strike process. :keyword strike: The strike process associated with these metrics. :type strike: :class:`django.db.models.ForeignKey` :keyword occurred: The date when the ingests included in this model were ended. :type occurred: :class:`django.db.models.DateField` :keyword deferred_count: The total number of deferred ingests. :type deferred_count: :class:`metrics.models.PlotBigIntegerField` :keyword ingested_count: The total number of successfully completed ingests. :type ingested_count: :class:`metrics.models.PlotBigIntegerField` :keyword errored_count: The total number of failed ingests. :type errored_count: :class:`metrics.models.PlotBigIntegerField` :keyword duplicate_count: The total number of duplicated ingests. :type duplicate_count: :class:`metrics.models.PlotBigIntegerField` :keyword file_size_sum: The total size of ingested files in bytes. :type file_size_sum: :class:`metrics.models.PlotBigIntegerField` :keyword file_size_min: The minimum size of ingested files in bytes. :type file_size_min: :class:`metrics.models.PlotBigIntegerField` :keyword file_size_max: The maximum size of ingested files in bytes. :type file_size_max: :class:`metrics.models.PlotBigIntegerField` :keyword file_size_avg: The average size of ingested files in bytes. :type file_size_avg: :class:`metrics.models.PlotBigIntegerField` :keyword transfer_time_sum: The total time spent transferring ingested files in seconds. :type transfer_time_sum: :class:`metrics.models.PlotBigIntegerField` :keyword transfer_time_min: The minimum time spent transferring ingested files in seconds. :type transfer_time_min: :class:`metrics.models.PlotIntegerField` :keyword transfer_time_max: The maximum time spent transferring ingested files in seconds. :type transfer_time_max: :class:`metrics.models.PlotIntegerField` :keyword transfer_time_avg: The average time spent transferring ingested files in seconds. :type transfer_time_avg: :class:`metrics.models.PlotIntegerField` :keyword ingest_time_sum: The total time spent ingesting files in seconds. :type ingest_time_sum: :class:`metrics.models.PlotBigIntegerField` :keyword ingest_time_min: The minimum time spent ingesting files in seconds. :type ingest_time_min: :class:`metrics.models.PlotIntegerField` :keyword ingest_time_max: The maximum time spent ingesting files in seconds. :type ingest_time_max: :class:`metrics.models.PlotIntegerField` :keyword ingest_time_avg: The average time spent ingesting files in seconds. :type ingest_time_avg: :class:`metrics.models.PlotIntegerField` :keyword created: When the model was first created. 
:type created: :class:`django.db.models.DateTimeField` """ GROUPS = [ MetricsTypeGroup('overview', 'Overview', 'Overall counts based on ingest status.'), MetricsTypeGroup('file_size', 'File Size', 'Size information about ingested files.'), MetricsTypeGroup('transfer_time', 'Transfer Time', 'When files were being transferred before ingest.'), MetricsTypeGroup('ingest_time', 'Ingest Time', 'When files were processed during ingest.'), ] strike = models.ForeignKey('ingest.Strike', on_delete=models.PROTECT) occurred = models.DateTimeField(db_index=True) deferred_count = PlotBigIntegerField(aggregate='sum', blank=True, group='overview', help_text='Number of files deferred (ignored) by the ingest process.', null=True, units='count', verbose_name='Deferred Count') ingested_count = PlotBigIntegerField(aggregate='sum', blank=True, group='overview', help_text='Number of files successfully ingested.', null=True, units='count', verbose_name='Ingested Count') errored_count = PlotBigIntegerField(aggregate='sum', blank=True, group='overview', help_text='Number of files that failed to ingest.', null=True, units='count', verbose_name='Errored Count') duplicate_count = PlotBigIntegerField(aggregate='sum', blank=True, group='overview', help_text='Number of files that were duplicates of previous ingests.', null=True, units='count', verbose_name='Duplicate Count') total_count = PlotBigIntegerField(aggregate='sum', blank=True, group='overview', help_text='Number of deferred, ingested, errored, and duplicate ingests.', null=True, units='count', verbose_name='Total Count') file_size_sum = PlotBigIntegerField(aggregate='sum', blank=True, group='file_size', help_text='Total size of ingested files.', null=True, units='bytes', verbose_name='File Size (Sum)') file_size_min = PlotBigIntegerField(aggregate='min', blank=True, group='file_size', help_text='Minimum size of ingested files.', null=True, units='bytes', verbose_name='File Size (Min)') file_size_max = PlotBigIntegerField(aggregate='max', blank=True, group='file_size', help_text='Maximum size of ingested files.', null=True, units='bytes', verbose_name='File Size (Max)') file_size_avg = PlotBigIntegerField(aggregate='avg', blank=True, group='file_size', help_text='Average size of ingested files.', null=True, units='bytes', verbose_name='File Size (Avg)') transfer_time_sum = PlotBigIntegerField(aggregate='sum', blank=True, group='transfer_time', help_text='Total time spent transferring files before ingest.', null=True, units='seconds', verbose_name='Transfer Time (Sum)') transfer_time_min = PlotIntegerField(aggregate='min', blank=True, group='transfer_time', help_text='Minimum time spent transferring files before ingest.', null=True, units='seconds', verbose_name='Transfer Time (Min)') transfer_time_max = PlotIntegerField(aggregate='max', blank=True, group='transfer_time', help_text='Maximum time spent transferring files before ingest.', null=True, units='seconds', verbose_name='Transfer Time (Max)') transfer_time_avg = PlotIntegerField(aggregate='avg', blank=True, group='transfer_time', help_text='Average time spent transferring files before ingest.', null=True, units='seconds', verbose_name='Transfer Time (Avg)') ingest_time_sum = PlotBigIntegerField(aggregate='sum', blank=True, group='ingest_time', help_text='Total time spent processing files during ingest.', null=True, units='seconds', verbose_name='Ingest Time (Sum)') ingest_time_min = PlotIntegerField(aggregate='min', blank=True, group='ingest_time', help_text='Minimum time spent processing files during 
ingest.', null=True, units='seconds', verbose_name='Ingest Time (Min)') ingest_time_max = PlotIntegerField(aggregate='max', blank=True, group='ingest_time', help_text='Maximum time spent processing files during ingest.', null=True, units='seconds', verbose_name='Ingest Time (Max)') ingest_time_avg = PlotIntegerField(aggregate='avg', blank=True, group='ingest_time', help_text='Average time spent processing files during ingest.', null=True, units='seconds', verbose_name='Ingest Time (Avg)') created = models.DateTimeField(auto_now_add=True) objects = MetricsIngestManager() class Meta(object): """meta information for the db""" db_table = 'metrics_ingest'
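# --- Illustrative query (not part of the model) --------------------------------
# Hedged sketch using only the fields defined above and the plain Django ORM;
# MetricsIngestManager may already expose richer, plot-oriented helpers.
from django.db.models import Avg, Sum

def ingest_summary(strike, start, end):
    """Aggregate ingest counters for one strike process over [start, end)."""
    return (MetricsIngest.objects
            .filter(strike=strike, occurred__gte=start, occurred__lt=end)
            .aggregate(ingested=Sum('ingested_count'),
                       errored=Sum('errored_count'),
                       deferred=Sum('deferred_count'),
                       avg_file_size=Avg('file_size_avg')))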
class Practice(models.Model): ''' GP practices. HSCIC practice status is from: http://systems.hscic.gov.uk/data/ods/datadownloads/gppractice/index_html ''' PRESCRIBING_SETTINGS = ((-1, 'Unknown'), (0, 'Other'), (1, 'WIC Practice'), (2, 'OOH Practice'), (3, 'WIC + OOH Practice'), (4, 'GP Practice'), (8, 'Public Health Service'), (9, 'Community Health Service'), (10, 'Hospital Service'), (11, 'Optometry Service'), (12, 'Urgent & Emergency Care'), (13, 'Hospice'), (14, 'Care Home / Nursing Home'), (15, 'Border Force'), (16, 'Young Offender Institution'), (17, 'Secure Training Centre'), (18, "Secure Children's Home"), (19, "Immigration Removal Centre"), (20, "Court"), (21, "Police Custody"), (22, "Sexual Assault Referral Centre (SARC)"), (24, "Other - Justice Estate"), (25, "Prison")) STATUS_SETTINGS = (('U', 'Unknown'), ('A', 'Active'), ('B', 'Retired'), ('C', 'Closed'), ('D', 'Dormant'), ('P', 'Proposed')) ccg = models.ForeignKey(PCT, null=True, blank=True) code = models.CharField(max_length=6, primary_key=True, help_text='Practice code') name = models.CharField(max_length=200) address1 = models.CharField(max_length=200, null=True, blank=True) address2 = models.CharField(max_length=200, null=True, blank=True) address3 = models.CharField(max_length=200, null=True, blank=True) address4 = models.CharField(max_length=200, null=True, blank=True) address5 = models.CharField(max_length=200, null=True, blank=True) postcode = models.CharField(max_length=9, null=True, blank=True) location = models.PointField(null=True, blank=True) setting = models.IntegerField(choices=PRESCRIBING_SETTINGS, default=-1) objects = models.GeoManager() open_date = models.DateField(null=True, blank=True) close_date = models.DateField(null=True, blank=True) join_provider_date = models.DateField(null=True, blank=True) leave_provider_date = models.DateField(null=True, blank=True) status_code = models.CharField(max_length=1, choices=STATUS_SETTINGS, null=True, blank=True) def __str__(self): return self.name @property def cased_name(self): return nhs_titlecase(self.name) def address_pretty(self): address = self.address1 + ', ' if self.address2: address += self.address2 + ', ' if self.address3: address += self.address3 + ', ' if self.address4: address += self.address4 + ', ' if self.address5: address += self.address5 + ', ' address += self.postcode return address def address_pretty_minus_firstline(self): address = '' if self.address2: address += self.address2 + ', ' if self.address3: address += self.address3 + ', ' if self.address4: address += self.address4 + ', ' if self.address5: address += self.address5 + ', ' address += self.postcode return address class Meta: app_label = 'frontend'
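# --- Illustrative usage (not part of the model) --------------------------------
# address_pretty() joins the non-empty address lines with commas and always
# appends the postcode; quick in-memory illustration (values made up, no
# database needed):
practice = Practice(
    code='X99999', name='EXAMPLE SURGERY',
    address1='THE HEALTH CENTRE', address2='HIGH STREET',
    address4='EXAMPLETOWN', postcode='EX1 2MP',
    setting=4, status_code='A')
practice.address_pretty()
# -> 'THE HEALTH CENTRE, HIGH STREET, EXAMPLETOWN, EX1 2MP'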
class MetricsJobType(models.Model): """Tracks all the job execution metrics grouped by job type. :keyword job_type: The type of job associated with these metrics. :type job_type: :class:`django.db.models.ForeignKey` :keyword occurred: The date when the job executions included in this model were ended. :type occurred: :class:`django.db.models.DateField` :keyword completed_count: The total number of completed job executions. :type completed_count: :class:`metrics.models.PlotBigIntegerField` :keyword failed_count: The total number of failed job executions. :type failed_count: :class:`metrics.models.PlotBigIntegerField` :keyword canceled_count: The total number of canceled job executions. :type canceled_count: :class:`metrics.models.PlotBigIntegerField` :keyword total_count: The total number of ended job executions (completed, failed, canceled). :type total_count: :class:`metrics.models.PlotBigIntegerField` :keyword error_system_count: The number of failed job executions due to a system error. :type error_system_count: :class:`metrics.models.PlotBigIntegerField` :keyword error_data_count: The number of failed job executions due to a data error. :type error_data_count: :class:`metrics.models.PlotBigIntegerField` :keyword error_algorithm_count: The number of failed job executions due to an algorithm error. :type error_algorithm_count: :class:`metrics.models.PlotBigIntegerField` :keyword queue_time_sum: The total time job executions were queued in seconds. :type queue_time_sum: :class:`metrics.models.PlotBigIntegerField` :keyword queue_time_min: The minimum time a job execution was queued in seconds. :type queue_time_min: :class:`metrics.models.PlotIntegerField` :keyword queue_time_max: The maximum time a job execution was queued in seconds. :type queue_time_max: :class:`metrics.models.PlotIntegerField` :keyword queue_time_avg: The average time job executions were queued in seconds. :type queue_time_avg: :class:`metrics.models.PlotIntegerField` :keyword pre_time_sum: The total time job executions were executing pre-task steps in seconds. :type pre_time_sum: :class:`metrics.models.PlotBigIntegerField` :keyword pre_time_min: The minimum time a job execution was executing pre-task steps in seconds. :type pre_time_min: :class:`metrics.models.PlotIntegerField` :keyword pre_time_max: The maximum time a job execution was executing pre-task steps in seconds. :type pre_time_max: :class:`metrics.models.PlotIntegerField` :keyword pre_time_avg: The average time job executions were executing pre-task steps in seconds. :type pre_time_avg: :class:`metrics.models.PlotIntegerField` :keyword job_time_sum: The total time job executions were executing the actual job task in seconds. :type job_time_sum: :class:`metrics.models.PlotBigIntegerField` :keyword job_time_min: The minimum time a job execution was executing the actual job task in seconds. :type job_time_min: :class:`metrics.models.PlotIntegerField` :keyword job_time_max: The maximum time a job execution was executing the actual job task in seconds. :type job_time_max: :class:`metrics.models.PlotIntegerField` :keyword job_time_avg: The average time job executions were executing the actual job task in seconds. :type job_time_avg: :class:`metrics.models.PlotIntegerField` :keyword post_time_sum: The total time job executions were executing post-task steps in seconds. :type post_time_sum: :class:`metrics.models.PlotBigIntegerField` :keyword post_time_min: The minimum time a job execution was executing post-task steps in seconds. 
:type post_time_min: :class:`metrics.models.PlotIntegerField` :keyword post_time_max: The maximum time a job execution was executing post-task steps in seconds. :type post_time_max: :class:`metrics.models.PlotIntegerField` :keyword post_time_avg: The average time job executions were executing post-task steps in seconds. :type post_time_avg: :class:`metrics.models.PlotIntegerField` :keyword run_time_sum: The total time job executions were running in seconds. :type run_time_sum: :class:`metrics.models.PlotBigIntegerField` :keyword run_time_min: The minimum time a job execution was running in seconds. :type run_time_min: :class:`metrics.models.PlotIntegerField` :keyword run_time_max: The maximum time a job execution was running in seconds. :type run_time_max: :class:`metrics.models.PlotIntegerField` :keyword run_time_avg: The average time job executions were running in seconds. :type run_time_avg: :class:`metrics.models.PlotIntegerField` :keyword stage_time_sum: The total time job executions spent in system staging between tasks in seconds. :type stage_time_sum: :class:`metrics.models.PlotBigIntegerField` :keyword stage_time_min: The minimum time a job execution spent in system staging between tasks in seconds. :type stage_time_min: :class:`metrics.models.PlotIntegerField` :keyword stage_time_max: The maximum time a job execution spent in system staging between tasks in seconds. :type stage_time_max: :class:`metrics.models.PlotIntegerField` :keyword stage_time_avg: The average time job executions spent in system staging between tasks in seconds. :type stage_time_avg: :class:`metrics.models.PlotIntegerField` :keyword created: When the model was first created. :type created: :class:`django.db.models.DateTimeField` """ GROUPS = [ MetricsTypeGroup('overview', 'Overview', 'Overall counts based on job status.'), MetricsTypeGroup('errors', 'Errors', 'Overall error counts based on category.'), MetricsTypeGroup('queue_time', 'Queue Time', 'When jobs were in the queue.'), MetricsTypeGroup('pre_time', 'Pre-task Time', 'When jobs were being prepared.'), MetricsTypeGroup('job_time', 'Job Task Time', 'When jobs were executing their actual goal.'), MetricsTypeGroup('post_time', 'Post-task Time', 'When jobs were being cleaned up.'), MetricsTypeGroup('run_time', 'Run Time', 'When related tasks were run (pre, job, post).'), MetricsTypeGroup('stage_time', 'Stage Time', 'Times related to the overhead of the system.'), ] job_type = models.ForeignKey('job.JobType', on_delete=models.PROTECT) occurred = models.DateTimeField(db_index=True) completed_count = PlotBigIntegerField(aggregate='sum', blank=True, group='overview', help_text='Number of successfully completed jobs.', null=True, units='count', verbose_name='Completed Count') failed_count = PlotBigIntegerField(aggregate='sum', blank=True, group='overview', help_text='Number of incomplete failed jobs.', null=True, units='count', verbose_name='Failed Count') canceled_count = PlotBigIntegerField(aggregate='sum', blank=True, group='overview', help_text='Number of incomplete canceled jobs.', null=True, units='count', verbose_name='Canceled Count') total_count = PlotBigIntegerField(aggregate='sum', blank=True, group='overview', help_text='Number of completed, failed, and canceled jobs.', null=True, units='count', verbose_name='Total Count') error_system_count = PlotBigIntegerField(aggregate='sum', blank=True, group='errors', help_text='Number of failed jobs due to a system error.', null=True, units='count', verbose_name='System Error Count') error_data_count = 
PlotBigIntegerField(aggregate='sum', blank=True, group='errors', help_text='Number of failed jobs due to a data error.', null=True, units='count', verbose_name='Data Error Count') error_algorithm_count = PlotBigIntegerField(aggregate='sum', blank=True, group='errors', help_text='Number of failed jobs due to an algorithm error.', null=True, units='count', verbose_name='Algorithm Error Count') queue_time_sum = PlotBigIntegerField(aggregate='sum', blank=True, group='queue_time', help_text='Total time the job waited in the queue.', null=True, units='seconds', verbose_name='Queue Time (Sum)') queue_time_min = PlotIntegerField(aggregate='min', blank=True, group='queue_time', help_text='Minimum time the job waited in the queue.', null=True, units='seconds', verbose_name='Queue Time (Min)') queue_time_max = PlotIntegerField(aggregate='max', blank=True, group='queue_time', help_text='Maximum time the job waited in the queue.', null=True, units='seconds', verbose_name='Queue Time (Max)') queue_time_avg = PlotIntegerField(aggregate='avg', blank=True, group='queue_time', help_text='Average time the job waited in the queue.', null=True, units='seconds', verbose_name='Queue Time (Avg)') pre_time_sum = PlotBigIntegerField(aggregate='sum', blank=True, group='pre_time', help_text='Total time spent preparing the job task.', null=True, units='seconds', verbose_name='Pre-task Time (Sum)') pre_time_min = PlotIntegerField(aggregate='min', blank=True, group='pre_time', help_text='Minimum time spent preparing the job task.', null=True, units='seconds', verbose_name='Pre-task Time (Min)') pre_time_max = PlotIntegerField(aggregate='max', blank=True, group='pre_time', help_text='Maximum time spent preparing the job task.', null=True, units='seconds', verbose_name='Pre-task Time (Max)') pre_time_avg = PlotIntegerField(aggregate='avg', blank=True, group='pre_time', help_text='Average time spent preparing the job task.', null=True, units='seconds', verbose_name='Pre-task Time (Avg)') job_time_sum = PlotBigIntegerField(aggregate='sum', blank=True, group='job_time', help_text='Total time spent running the job task.', null=True, units='seconds', verbose_name='Job Task Time (Sum)') job_time_min = PlotIntegerField(aggregate='min', blank=True, group='job_time', help_text='Minimum time spent running the job task.', null=True, units='seconds', verbose_name='Job Task Time (Min)') job_time_max = PlotIntegerField(aggregate='max', blank=True, group='job_time', help_text='Maximum time spent running the job task.', null=True, units='seconds', verbose_name='Job Task Time (Max)') job_time_avg = PlotIntegerField(aggregate='avg', blank=True, group='job_time', help_text='Average time spent running the job task.', null=True, units='seconds', verbose_name='Job Task Time (Avg)') post_time_sum = PlotBigIntegerField(aggregate='sum', blank=True, group='post_time', help_text='Total time spent finalizing the job task.', null=True, units='seconds', verbose_name='Post-task Time (Sum)') post_time_min = PlotIntegerField(aggregate='min', blank=True, group='post_time', help_text='Minimum time spent finalizing the job task.', null=True, units='seconds', verbose_name='Post-task Time (Min)') post_time_max = PlotIntegerField(aggregate='max', blank=True, group='post_time', help_text='Maximum time spent finalizing the job task.', null=True, units='seconds', verbose_name='Post-task Time (Max)') post_time_avg = PlotIntegerField(aggregate='avg', blank=True, group='post_time', help_text='Average time spent finalizing the job task.', null=True, units='seconds', 
verbose_name='Post-task Time (Avg)') run_time_sum = PlotBigIntegerField(aggregate='sum', blank=True, group='run_time', help_text='Total time spent running the pre, job, and post tasks.', null=True, units='seconds', verbose_name='Run Time (Sum)') run_time_min = PlotIntegerField(aggregate='min', blank=True, group='run_time', help_text='Minimum time spent running the pre, job, and post tasks.', null=True, units='seconds', verbose_name='Run Time (Min)') run_time_max = PlotIntegerField(aggregate='max', blank=True, group='run_time', help_text='Maximum time spent running the pre, job, and post tasks.', null=True, units='seconds', verbose_name='Run Time (Max)') run_time_avg = PlotIntegerField(aggregate='avg', blank=True, group='run_time', help_text='Average time spent running the pre, job, and post tasks.', null=True, units='seconds', verbose_name='Run Time (Avg)') stage_time_sum = PlotBigIntegerField(aggregate='sum', blank=True, group='stage_time', help_text='Total overhead time spent managing tasks.', null=True, units='seconds', verbose_name='Stage Time (Sum)') stage_time_min = PlotIntegerField(aggregate='min', blank=True, group='stage_time', help_text='Minimum overhead time spent managing tasks.', null=True, units='seconds', verbose_name='Stage Time (Min)') stage_time_max = PlotIntegerField(aggregate='max', blank=True, group='stage_time', help_text='Maximum overhead time spent managing tasks.', null=True, units='seconds', verbose_name='Stage Time (Max)') stage_time_avg = PlotIntegerField(aggregate='avg', blank=True, group='stage_time', help_text='Average overhead time spent managing tasks.', null=True, units='seconds', verbose_name='Stage Time (Avg)') created = models.DateTimeField(auto_now_add=True) objects = MetricsJobTypeManager() class Meta(object): """meta information for the db""" db_table = 'metrics_job_type'
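# --- Illustrative query (not part of the model) --------------------------------
# Hedged sketch computing a failure rate from the counters above with the plain
# Django ORM; MetricsJobTypeManager may already provide equivalent helpers.
from django.db.models import Sum

def failure_rate(job_type, since):
    totals = (MetricsJobType.objects
              .filter(job_type=job_type, occurred__gte=since)
              .aggregate(failed=Sum('failed_count'), total=Sum('total_count')))
    if not totals['total']:
        return 0.0
    return (totals['failed'] or 0) / totals['total']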
class GeneralInformation(models.Model): """ Abstract model for General Information """ original_id = models.CharField( _("Original ID"), max_length=64, help_text=_('As recorded in the original database.')) # geometry information location = models.PointField( _("Location"), help_text=_("Location of the feature.") ) ground_surface_elevation = models.OneToOneField( Quantity, on_delete=models.SET_NULL, null=True, blank=True, help_text=_('Ground surface elevation above sea level.'), related_name='ground_surface_elevation', verbose_name=_('Ground surface elevation') ) top_borehole_elevation = models.OneToOneField( Quantity, on_delete=models.SET_NULL, null=True, blank=True, help_text=_('Elevation of top of borehole above sea level.'), related_name='top_borehole_elevation', verbose_name=_('Top of well elevation') ) name = models.CharField( _("Name"), null=True, blank=True, max_length=64 ) feature_type = models.ForeignKey( TermFeatureType, on_delete=models.SET_NULL, null=True, blank=True, verbose_name=_('Feature type') ) # location information address = models.TextField( _("Address"), null=True, blank=True ) country = models.ForeignKey( Country, on_delete=models.SET_NULL, null=True, blank=True, verbose_name=_('Country') ) # information photo = models.FileField( _("Photo"), null=True, blank=True, upload_to='gwml2/photos/', help_text=_('A photo of the groundwater point. More photos can be added in annex.') ) description = models.TextField( _("Description"), null=True, blank=True, help_text=_('A general description of the groundwater point.') ) class Meta: abstract = True
class SourcePoi(Poi): data_source = models.ForeignKey(Source)
class Callsign(LocationBaseModel): name = CallsignField(unique=True, db_index=True) prefix = models.ForeignKey(CallsignPrefix, on_delete=models.PROTECT, null=True, blank=True) country = models.ForeignKey(Country, on_delete=models.PROTECT, null=True, blank=True) cq_zone = CQZoneField("CQ zone", null=True, blank=True) itu_zone = ITUZoneField("ITU zone", null=True, blank=True) itu_region = ITURegionField("ITU region", null=True, blank=True) type = models.CharField(choices=CALLSIGN_TYPES, max_length=32, blank=True) owner = models.ForeignKey(get_user_model(), on_delete=models.SET_NULL, null=True, blank=True) active = models.BooleanField(default=True) issued = models.DateField(null=True, blank=True) expired = models.DateField(null=True, blank=True) license_type = models.CharField(max_length=64, blank=True) dstar = models.BooleanField("D-STAR", default=False) identifier = models.CharField(_("Optional identifier"), max_length=128, unique=True, blank=True, null=True) website = models.URLField(max_length=128, blank=True, null=True) comment = models.TextField(blank=True) _official_validated = models.BooleanField( default=False, help_text="Callsign is validated by a government agency") _location_source = models.CharField(max_length=32, choices=LOCATION_SOURCE_CHOICES, blank=True) lotw_last_activity = models.DateTimeField("LOTW last activity", null=True, blank=True) eqsl = models.BooleanField(default=False) created_by = models.ForeignKey(get_user_model(), on_delete=models.SET(get_sentinel_user), related_name="callsigns") internal_comment = models.TextField(blank=True) source = models.CharField(max_length=256, blank=True) objects = CallsignManager() # TODO(elnappo) make sure a user can not change his name after validation def __str__(self) -> str: return self.name def set_default_meta_data(self): prefix = CallsignPrefix.objects.extra( where=["%s LIKE name||'%%'"], params=[self.name]).order_by("-name").first() # Add changed fields to ImportCommand._callsign_bulk_create() if prefix: self.prefix = prefix self.country = prefix.country self.cq_zone = prefix.cq_zone self.itu_zone = prefix.itu_zone self.location = prefix.location self._location_source = "prefix" def get_absolute_url(self) -> str: return reverse('callsign:callsign-html-detail', args=[self.name]) def update_location(self, location: Point, source: str) -> bool: # Update location only if new location source has higher priority than current location source. # Does no update if new and current location source are equal. if LOCATION_SOURCE_PRIORITY.index( source) > LOCATION_SOURCE_PRIORITY.index(self.location_source): self.location = location self._location_source = source return True else: return False @property def aprs_passcode(self) -> int: if self.official_validated == "false" or self.type == "shortwave_listener": # Not a good idea to raise an exception here? 
raise PermissionDenied( "callsign is not official assigned or is shortwave listener") return generate_aprs_passcode(self.name) @property def official_validated(self) -> str: if self.country and self.country.telecommunicationagency.used_for_official_callsign_import and self._official_validated: return "true" elif self.country and self.country.telecommunicationagency.used_for_official_callsign_import and not self._official_validated: return "false" else: return "unknown" @property def location_source(self) -> str: if self._location_source in ("official", "unofficial"): return "address" else: return self._location_source @property def grid(self): if self.location_source == "prefix": return self._grid(high_accuracy=False) else: return self._grid(high_accuracy=True) @property def lotw(self) -> bool: return bool(self.lotw_last_activity) @property def eqsl_profile_url(self) -> str: return f"https://www.eqsl.cc/Member.cfm?{ self.name }" @property def clublog_profile_url(self) -> str: return f"https://secure.clublog.org/logsearch/{ self.name }" @property def dxheat_profile_url(self) -> str: return f"https://dxheat.com/db/{ self.name }" @property def aprsfi_profile_url(self) -> str: return f"https://aprs.fi/info/?call={ self.name }" @property def pskreporter_profile_url(self) -> str: return f"http://www.pskreporter.de/table?call={ self.name }" @property def qrzcq_profile_url(self) -> str: return f"https://www.qrzcq.com/call/{ self.name }" @property def qrz_profile_url(self) -> str: return f"https://www.qrz.com/db/{ self.name }" @property def hamqth_profile_url(self) -> str: return f"https://www.hamqth.com/{ self.name }" @property def hamcall_profile_url(self) -> str: return f"https://hamcall.net/call?callsign={ self.name }" @property def dxwatch_profile_url(self) -> str: return f"https://dxwatch.com/qrz/{self.name}"
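# --- Illustrative usage (not part of the model) --------------------------------
# The profile-URL properties are plain f-strings; the whitespace inside the
# braces is stripped by Python, so no spaces end up in the URLs. Quick
# in-memory illustration (values made up, no database needed):
cs = Callsign(name="DL1ABC")
cs.qrz_profile_url       # 'https://www.qrz.com/db/DL1ABC'
cs.dxheat_profile_url    # 'https://dxheat.com/db/DL1ABC'
cs.lotw                  # False - no LOTW activity recorded yet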
class TrafficSignPlan( DecimalValueFromDeviceTypeMixin, UpdatePlanLocationMixin, SourceControlModel, SoftDeleteModel, UserControlModel, ): id = models.UUIDField(primary_key=True, unique=True, editable=False, default=uuid.uuid4) location = models.PointField(_("Location (3D)"), dim=3, srid=settings.SRID) height = models.IntegerField(_("Height"), blank=True, null=True) direction = models.IntegerField(_("Direction"), default=0) device_type = models.ForeignKey( TrafficControlDeviceType, verbose_name=_("Device type"), on_delete=models.PROTECT, limit_choices_to=Q( Q(target_model=None) | Q(target_model=DeviceTypeTargetModel.TRAFFIC_SIGN)), ) value = models.DecimalField( _("Traffic Sign Code value"), max_digits=10, decimal_places=2, blank=True, null=True, ) txt = models.CharField(_("Txt"), max_length=254, blank=True, null=True) mount_plan = models.ForeignKey( MountPlan, verbose_name=_("Mount Plan"), on_delete=models.PROTECT, blank=True, null=True, ) mount_type = models.ForeignKey( MountType, verbose_name=_("Mount type"), blank=True, null=True, on_delete=models.SET_NULL, ) validity_period_start = models.DateField(_("Validity period start"), blank=True, null=True) validity_period_end = models.DateField(_("Validity period end"), blank=True, null=True) affect_area = models.PolygonField(_("Affect area (2D)"), srid=settings.SRID, blank=True, null=True) plan = models.ForeignKey( Plan, verbose_name=_("Plan"), on_delete=models.PROTECT, related_name="traffic_sign_plans", blank=True, null=True, ) size = EnumField( Size, verbose_name=_("Size"), max_length=1, default=Size.MEDIUM, blank=True, null=True, ) reflection_class = EnumField( Reflection, verbose_name=_("Reflection"), max_length=2, default=Reflection.R1, blank=True, null=True, ) surface_class = EnumField( Surface, verbose_name=_("Surface"), max_length=6, default=Surface.FLAT, blank=True, null=True, ) seasonal_validity_period_start = models.DateField( _("Seasonal validity period start"), blank=True, null=True) seasonal_validity_period_end = models.DateField( _("Seasonal validity period end"), blank=True, null=True) owner = models.ForeignKey( "traffic_control.Owner", verbose_name=_("Owner"), blank=False, null=False, on_delete=models.PROTECT, ) lifecycle = EnumIntegerField(Lifecycle, verbose_name=_("Lifecycle"), default=Lifecycle.ACTIVE) road_name = models.CharField(_("Road name"), max_length=254, blank=True, null=True) lane_number = EnumField(LaneNumber, verbose_name=_("Lane number"), default=LaneNumber.MAIN_1, blank=True) lane_type = EnumField( LaneType, verbose_name=_("Lane type"), default=LaneType.MAIN, blank=True, ) location_specifier = EnumIntegerField( LocationSpecifier, verbose_name=_("Location specifier"), default=LocationSpecifier.RIGHT, blank=True, null=True, ) objects = TrafficSignPlanQuerySet.as_manager() class Meta: db_table = "traffic_sign_plan" verbose_name = _("Traffic Sign Plan") verbose_name_plural = _("Traffic Sign Plans") unique_together = ["source_name", "source_id"] def __str__(self): return f"{self.id} {self.device_type}" def save(self, *args, **kwargs): if not self.device_type.validate_relation( DeviceTypeTargetModel.TRAFFIC_SIGN): raise ValidationError( f'Device type "{self.device_type}" is not allowed for traffic signs' ) super().save(*args, **kwargs) def has_additional_signs(self): return self.additional_signs.active().exists() @transaction.atomic def soft_delete(self, user): super().soft_delete(user) self.additional_signs.soft_delete(user)
class Resource(ModifiableModel, AutoIdentifiedModel): AUTHENTICATION_TYPES = (('none', _('None')), ('weak', _('Weak')), ('strong', _('Strong'))) ACCESS_CODE_TYPE_NONE = 'none' ACCESS_CODE_TYPE_PIN6 = 'pin6' ACCESS_CODE_TYPES = ( (ACCESS_CODE_TYPE_NONE, _('None')), (ACCESS_CODE_TYPE_PIN6, _('6-digit pin code')), ) id = models.CharField(primary_key=True, max_length=100) public = models.BooleanField(default=True, verbose_name=_('Public')) unit = models.ForeignKey('Unit', verbose_name=_('Unit'), db_index=True, null=True, blank=True, related_name="resources", on_delete=models.PROTECT) type = models.ForeignKey(ResourceType, verbose_name=_('Resource type'), db_index=True, on_delete=models.PROTECT) purposes = models.ManyToManyField(Purpose, verbose_name=_('Purposes')) name = models.CharField(verbose_name=_('Name'), max_length=200) description = models.TextField(verbose_name=_('Description'), null=True, blank=True) owner_email = models.TextField(verbose_name=_('Owner email'), null=True, blank=True) need_manual_confirmation = models.BooleanField( verbose_name=_('Need manual confirmation'), default=False) authentication = models.CharField(blank=False, verbose_name=_('Authentication'), max_length=20, choices=AUTHENTICATION_TYPES) people_capacity = models.IntegerField(verbose_name=_('People capacity'), null=True, blank=True) area = models.IntegerField(verbose_name=_('Area'), null=True, blank=True) # if not set, location is inherited from unit location = models.PointField(verbose_name=_('Location'), null=True, blank=True, srid=settings.DEFAULT_SRID) min_period = models.DurationField( verbose_name=_('Minimum reservation time'), default=datetime.timedelta(minutes=30)) max_period = models.DurationField( verbose_name=_('Maximum reservation time'), null=True, blank=True) equipment = models.ManyToManyField(Equipment, verbose_name=_('Equipment'), through='ResourceEquipment') max_reservations_per_user = models.IntegerField( verbose_name=_('Maximum number of active reservations per user'), null=True, blank=True) reservable = models.BooleanField(verbose_name=_('Reservable'), default=False) reservation_info = models.TextField(verbose_name=_('Reservation info'), null=True, blank=True) responsible_contact_info = models.TextField( verbose_name=_('Responsible contact info'), blank=True) generic_terms = models.ForeignKey(TermsOfUse, verbose_name=_('Generic terms'), null=True, blank=True, on_delete=models.SET_NULL) specific_terms = models.TextField(verbose_name=_('Specific terms'), blank=True) reservation_confirmed_notification_extra = models.TextField(verbose_name=_( 'Extra content to reservation confirmed ' 'notification'), blank=True) min_price_per_hour = models.DecimalField( verbose_name=_('Min price per hour'), max_digits=8, decimal_places=2, blank=True, null=True, validators=[MinValueValidator(Decimal('0.00'))]) max_price_per_hour = models.DecimalField( verbose_name=_('Max price per hour'), max_digits=8, decimal_places=2, blank=True, null=True, validators=[MinValueValidator(Decimal('0.00'))]) access_code_type = models.CharField(verbose_name=_('Access code type'), max_length=20, choices=ACCESS_CODE_TYPES, default=ACCESS_CODE_TYPE_NONE) reservable_days_in_advance = models.PositiveSmallIntegerField( verbose_name=_('Reservable days in advance'), null=True, blank=True) reservation_metadata_set = models.ForeignKey( 'resources.ReservationMetadataSet', null=True, blank=True, on_delete=models.SET_NULL) objects = ResourceQuerySet.as_manager() ceepos_payment_required = models.BooleanField( verbose_name=_('Ceepos payment 
required'), default=False) product_code = models.CharField(verbose_name=_('Product code'), null=False, blank=True, max_length=25, default='') class Meta: verbose_name = _("resource") verbose_name_plural = _("resources") ordering = ( 'unit', 'name', ) def __str__(self): return "%s (%s)/%s" % (get_translated(self, 'name'), self.id, self.unit) def validate_reservation_period(self, reservation, user, data=None): """ Check that the given reservation is valid for the given user. The reservation may be provided as a Reservation or as a data dict. When providing the data dict from a serializer, the reservation argument must be present to indicate the reservation being edited, or None if we are creating a new reservation. If the reservation is not valid raises a ValidationError. Staff members have no restrictions at least for now. Normal users cannot make multi-day reservations or reservations outside opening hours. :type reservation: Reservation :type user: User :type data: dict[str, Object] """ # no restrictions for staff if self.is_admin(user): return tz = self.unit.get_tz() # check if data from serializer is present: if data: begin = data['begin'] end = data['end'] else: # if data is not provided, the reservation object has the desired data: begin = reservation.begin end = reservation.end if begin.tzinfo: begin = begin.astimezone(tz) else: begin = tz.localize(begin) if end.tzinfo: end = end.astimezone(tz) else: end = tz.localize(end) if begin.date() != end.date(): raise ValidationError(_("You cannot make a multi day reservation")) opening_hours = self.get_opening_hours(begin.date(), end.date()) days = opening_hours.get(begin.date(), None) if days is None or not any( day['opens'] and begin >= day['opens'] and end <= day['closes'] for day in days): if not self._has_perm(user, 'can_ignore_opening_hours'): raise ValidationError( _("You must start and end the reservation during opening hours" )) if self.max_period and (end - begin) > self.max_period: raise ValidationError( _("The maximum reservation length is %(max_period)s") % {'max_period': humanize_duration(self.max_period)}) def validate_max_reservations_per_user(self, user): """ Check the maximum number of active reservations per user per resource. If the user has too many reservations raises ValidationError. Staff members have no reservation limits. :type user: User """ if self.is_admin(user): return max_count = self.max_reservations_per_user if max_count is not None: reservation_count = self.reservations.filter( user=user).active().count() if reservation_count >= max_count: raise ValidationError( _("Maximum number of active reservations for this resource exceeded." )) def check_reservation_collision(self, begin, end, reservation): overlapping = self.reservations.filter(end__gt=begin, begin__lt=end).active() if reservation: overlapping = overlapping.exclude(pk=reservation.pk) return overlapping.exists() def get_available_hours(self, start=None, end=None, duration=None, reservation=None, during_closing=False): """ Returns hours that the resource is not reserved for a given date range. If during_closing=True, will also return hours when the resource is closed, if it is not reserved. This is so that admins can book resources during closing hours. Returns the available hours as a list of dicts. The optional reservation argument is for disregarding a given reservation during checking, if we wish to move an existing reservation. The optional duration argument specifies the minimum length for periods to be returned. 
:rtype: list[dict[str, datetime.datetime]] :type start: datetime.datetime :type end: datetime.datetime :type duration: datetime.timedelta :type reservation: Reservation :type during_closing: bool """ today = arrow.get(timezone.now()) if start is None: start = today.floor('day').naive if end is None: end = today.replace(days=+1).floor('day').naive if not start.tzinfo and not end.tzinfo: """ Only try to localize naive dates """ tz = timezone.get_current_timezone() start = tz.localize(start) end = tz.localize(end) if not during_closing: """ Check open hours only """ open_hours = self.get_opening_hours(start, end) hours_list = [] for date, open_during_date in open_hours.items(): for period in open_during_date: if period['opens']: # if the start or end straddle opening hours opens = period[ 'opens'] if period['opens'] > start else start closes = period[ 'closes'] if period['closes'] < end else end # include_closed to prevent recursion, opening hours need not be rechecked hours_list.extend( self.get_available_hours(start=opens, end=closes, duration=duration, reservation=reservation, during_closing=True)) return hours_list reservations = self.reservations.filter( end__gte=start, begin__lte=end).order_by('begin') hours_list = [({'starts': start})] first_checked = False for res in reservations: # skip the reservation that is being edited if res == reservation: continue # check if the reservation spans the beginning if not first_checked: first_checked = True if res.begin < start: if res.end > end: return [] hours_list[0]['starts'] = res.end # proceed to the next reservation continue if duration: if res.begin - hours_list[-1]['starts'] < duration: # the free period is too short, discard this period hours_list[-1]['starts'] = res.end continue hours_list[-1]['ends'] = timezone.localtime(res.begin) # check if the reservation spans the end if res.end > end: return hours_list hours_list.append({'starts': timezone.localtime(res.end)}) # after the last reservation, we must check if the remaining free period is too short if duration: if end - hours_list[-1]['starts'] < duration: hours_list.pop() return hours_list # otherwise add the remaining free period hours_list[-1]['ends'] = end return hours_list def get_opening_hours(self, begin=None, end=None, opening_hours_cache=None): """ :rtype : dict[str, datetime.datetime] :type begin: datetime.date :type end: datetime.date """ tz = pytz.timezone(self.unit.time_zone) begin, end = determine_hours_time_range(begin, end, tz) if opening_hours_cache is None: hours_objs = self.opening_hours.filter( open_between__overlap=(begin, end, '[)')) else: hours_objs = opening_hours_cache opening_hours = dict() for h in hours_objs: opens = h.open_between.lower.astimezone(tz) closes = h.open_between.upper.astimezone(tz) date = opens.date() hours_item = OrderedDict(opens=opens, closes=closes) date_item = opening_hours.setdefault(date, []) date_item.append(hours_item) # Set the dates when the resource is closed. 
date = begin.date() end = end.date() while date < end: if date not in opening_hours: opening_hours[date] = [OrderedDict(opens=None, closes=None)] date += datetime.timedelta(days=1) return opening_hours def update_opening_hours(self): hours = self.opening_hours.order_by('open_between') existing_hours = {} for h in hours: assert h.open_between.lower not in existing_hours existing_hours[h.open_between.lower] = h.open_between.upper unit_periods = list(self.unit.periods.all()) resource_periods = list(self.periods.all()) # Periods set for the resource always carry a higher priority. If # nothing is defined for the resource for a given day, use the # periods configured for the unit. for period in unit_periods: period.priority = 0 for period in resource_periods: period.priority = 1 earliest_date = None latest_date = None all_periods = unit_periods + resource_periods for period in all_periods: if earliest_date is None or period.start < earliest_date: earliest_date = period.start if latest_date is None or period.end > latest_date: latest_date = period.end # Assume we delete everything, but remove items from the delete # list if the hours are identical. to_delete = existing_hours to_add = {} if all_periods: hours = get_opening_hours(self.unit.time_zone, all_periods, earliest_date, latest_date) for hours_items in hours.values(): for h in hours_items: if not h['opens'] or not h['closes']: continue if h['opens'] in to_delete and h['closes'] == to_delete[ h['opens']]: del to_delete[h['opens']] continue to_add[h['opens']] = h['closes'] if to_delete: ret = ResourceDailyOpeningHours.objects.filter( open_between__in=[(opens, closes, '[)') for opens, closes in to_delete.items()], resource=self).delete() assert ret[0] == len(to_delete) add_objs = [ ResourceDailyOpeningHours(resource=self, open_between=(opens, closes, '[)')) for opens, closes in to_add.items() ] if add_objs: ResourceDailyOpeningHours.objects.bulk_create(add_objs) def is_admin(self, user): # Currently all staff members are allowed to administrate # all resources. Will be more finegrained in the future. # # UserFilterBackend and ReservationFilterSet in resources.api.reservation assume the same behaviour, # so if this is changed those need to be changed as well. return user.is_staff def _has_perm(self, user, perm, allow_admin=True): if not (user and user.is_authenticated): return False # Admins are almighty. if self.is_admin(user) and allow_admin: return True if hasattr(self, '_permission_checker'): checker = self._permission_checker else: checker = ObjectPermissionChecker(user) # Permissions can be given per-unit if checker.has_perm('unit:%s' % perm, self.unit): return True # ... 
or through Resource Groups resource_group_perms = [ checker.has_perm('group:%s' % perm, rg) for rg in self.groups.all() ] return any(resource_group_perms) def get_users_with_perm(self, perm): users = { u for u in get_users_with_perms(self.unit) if u.has_perm('unit:%s' % perm, self.unit) } for rg in self.groups.all(): users |= { u for u in get_users_with_perms(rg) if u.has_perm('group:%s' % perm, rg) } return users def can_make_reservations(self, user): return self.reservable or self._has_perm(user, 'can_make_reservations') def can_modify_reservations(self, user): return self._has_perm(user, 'can_modify_reservations') def can_ignore_opening_hours(self, user): return self._has_perm(user, 'can_ignore_opening_hours') def can_view_reservation_extra_fields(self, user): return self._has_perm(user, 'can_view_reservation_extra_fields') def can_access_reservation_comments(self, user): return self._has_perm(user, 'can_access_reservation_comments') def can_view_catering_orders(self, user): return self._has_perm(user, 'can_view_reservation_catering_orders') def can_modify_catering_orders(self, user): return self._has_perm(user, 'can_modify_reservation_catering_orders') def can_approve_reservations(self, user): return self._has_perm(user, 'can_approve_reservation', allow_admin=False) def can_view_access_codes(self, user): return self._has_perm(user, 'can_view_reservation_access_code') def is_access_code_enabled(self): return self.access_code_type != Resource.ACCESS_CODE_TYPE_NONE def get_reservable_days_in_advance(self): return self.reservable_days_in_advance or self.unit.reservable_days_in_advance def get_reservable_before(self): return create_reservable_before_datetime( self.get_reservable_days_in_advance()) def get_supported_reservation_extra_field_names(self, cache=None): if not self.reservation_metadata_set_id: return [] if cache: metadata_set = cache[self.reservation_metadata_set_id] else: metadata_set = self.reservation_metadata_set return [x.field_name for x in metadata_set.supported_fields.all()] def get_required_reservation_extra_field_names(self, cache=None): if not self.reservation_metadata_set: return [] if cache: metadata_set = cache[self.reservation_metadata_set_id] else: metadata_set = self.reservation_metadata_set return [x.field_name for x in metadata_set.required_fields.all()] def clean(self): if self.min_price_per_hour is not None and self.max_price_per_hour is not None: if self.min_price_per_hour > self.max_price_per_hour: raise ValidationError({ 'min_price_per_hour': _('This value cannot be greater than max price per hour') })
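# --- Illustrative usage (not part of the model) --------------------------------
# Hedged sketch of the checks a view/serializer would typically run before
# creating a reservation; the helper itself is an assumption for illustration,
# only the Resource methods called are real.
from django.core.exceptions import PermissionDenied, ValidationError

def check_new_reservation(resource, user, begin, end):
    if not resource.can_make_reservations(user):
        raise PermissionDenied("User may not reserve this resource")
    resource.validate_max_reservations_per_user(user)
    # No existing reservation is being edited, so pass reservation=None.
    resource.validate_reservation_period(None, user, data={'begin': begin, 'end': end})
    if resource.check_reservation_collision(begin, end, None):
        raise ValidationError("The time slot overlaps an existing reservation")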
class DummyNullFieldsModel(models.Model):
    null_foreign_key = models.ForeignKey('DummyBlankFieldsModel', null=True,
                                         on_delete=models.CASCADE)
    null_integer_field = models.IntegerField(null=True)
class ResourceImage(ModifiableModel): TYPES = ( ('main', _('Main photo')), ('ground_plan', _('Ground plan')), ('map', _('Map')), ('other', _('Other')), ) resource = models.ForeignKey('Resource', verbose_name=_('Resource'), db_index=True, related_name='images', on_delete=models.CASCADE) type = models.CharField(max_length=20, verbose_name=_('Type'), choices=TYPES) caption = models.CharField(max_length=100, verbose_name=_('Caption'), null=True, blank=True) # FIXME: name images based on resource, type, and sort_order image = models.ImageField(verbose_name=_('Image'), upload_to='resource_images') image_format = models.CharField(max_length=10) cropping = ImageRatioField('image', '800x800', verbose_name=_('Cropping')) sort_order = models.PositiveSmallIntegerField(verbose_name=_('Sort order')) def save(self, *args, **kwargs): self._process_image() if self.sort_order is None: other_images = self.resource.images.order_by('-sort_order') if not other_images: self.sort_order = 0 else: self.sort_order = other_images[0].sort_order + 1 if self.type == "main": other_main_images = self.resource.images.filter(type="main") if other_main_images.exists(): # Demote other main images to "other". # The other solution would be to raise an error, but that would # lead to a more awkward API experience (having to first patch other # images for the resource, then fix the last one). other_main_images.update(type="other") return super(ResourceImage, self).save(*args, **kwargs) def full_clean(self, exclude=(), validate_unique=True): if "image" not in exclude: self._process_image() return super(ResourceImage, self).full_clean(exclude, validate_unique) def _process_image(self): """ Preprocess the uploaded image file, if required. This may transcode the image to a JPEG or PNG if it's not either to begin with. :raises InvalidImage: Exception raised if the uploaded file is not valid. """ if not self.image: # No image set - we can't do this right now return if self.image_format: # Assume that if image_format is set, no further processing is required return try: img = Image.open(self.image) img.load() except Exception as exc: raise InvalidImage("Image %s not valid (%s)" % (self.image, exc)) from exc if img.format not in ("JPEG", "PNG"): # Needs transcoding. if self.type in ("map", "ground_plan"): target_format = "PNG" save_kwargs = {} else: target_format = "JPEG" save_kwargs = {"quality": 75, "progressive": True} image_bio = BytesIO() img.save(image_bio, format=target_format, **save_kwargs) self.image = ContentFile( image_bio.getvalue(), name=os.path.splitext(self.image.name)[0] + ".%s" % target_format.lower()) self.image_format = target_format else: # All good -- keep the file as-is. self.image_format = img.format # def get_upload_filename(image, filename): -- used to live here, but was dead code def __str__(self): return "%s image for %s" % (self.get_type_display(), str( self.resource)) class Meta: verbose_name = _('resource image') verbose_name_plural = _('resource images') unique_together = (('resource', 'sort_order'), )
class BaseModelForNext(models.Model): fk = models.ForeignKey(ModelWithNext, on_delete=models.CASCADE)
class SensitiveArea(MapEntityMixin, StructureRelated, TimeStampedModelMixin, NoDeleteMixin, AddPropertyMixin): geom = models.GeometryField(srid=settings.SRID) species = models.ForeignKey(Species, verbose_name=_(u"Sensitive area"), db_column='espece', on_delete=models.PROTECT) published = models.BooleanField(verbose_name=_(u"Published"), default=False, help_text=_(u"Online"), db_column='public') publication_date = models.DateField(verbose_name=_(u"Publication date"), null=True, blank=True, editable=False, db_column='date_publication') description = models.TextField(verbose_name=_("Description"), blank=True) contact = models.TextField(verbose_name=_("Contact"), blank=True) eid = models.CharField(verbose_name=_(u"External id"), max_length=128, blank=True, null=True, db_column='id_externe') objects = NoDeleteMixin.get_manager_cls(models.GeoManager)() class Meta: db_table = 's_t_zone_sensible' verbose_name = _(u"Sensitive area") verbose_name_plural = _(u"Sensitive areas") permissions = (("import_sensitivearea", "Can import Sensitive area"), ) def __unicode__(self): return self.species.name @property def radius(self): if self.species.radius is None: return settings.SENSITIVITY_DEFAULT_RADIUS return self.species.radius @classproperty def radius_verbose_name(cls): return _("Radius") @property def category_display(self): return self.species.get_category_display() @classproperty def category_verbose_name(cls): return _("Category") def save(self, *args, **kwargs): if self.publication_date is None and self.published: self.publication_date = datetime.date.today() if self.publication_date is not None and not self.published: self.publication_date = None super(SensitiveArea, self).save(*args, **kwargs) @property def any_published(self): return self.published @property def published_status(self): """Returns the publication status by language. """ status = [] for l in settings.MAPENTITY_CONFIG['TRANSLATED_LANGUAGES']: status.append({ 'lang': l[0], 'language': l[1], 'status': self.published }) return status @property def published_langs(self): """Returns languages in which the object is published. """ if self.published: return [ l[0] for l in settings.MAPENTITY_CONFIG['TRANSLATED_LANGUAGES'] ] else: return [] @property def species_display(self): s = u'<a data-pk="%s" href="%s" title="%s">%s</a>' % ( self.pk, self.get_detail_url(), self.species.name, self.species.name) if self.published: s = u'<span class="badge badge-success" title="%s">☆</span> ' % _( "Published") + s return s @property def extent(self): return self.geom.transform(settings.API_SRID, clone=True).extent if self.geom else None def kml(self): """Exports sensitivearea into KML format""" kml = simplekml.Kml() geom = self.geom if geom.geom_type == 'Point': geom = geom.buffer( self.species.radius or settings.SENSITIVITY_DEFAULT_RADIUS, 4) geom = geom.transform(4326, clone=True) # KML uses WGS84 line = kml.newpolygon(name=self.species.name, description=plain_text(self.description), outerboundaryis=geom.coords[0]) line.style.linestyle.color = simplekml.Color.red # Red line.style.linestyle.width = 4 # pixels return kml.kml() def is_public(self): return self.published @property def pretty_period(self): return self.species.pretty_period() pretty_period_verbose_name = _("Period") @property def pretty_practices(self): return self.species.pretty_practices() pretty_practices_verbose_name = _("Practices")
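# --- Illustrative usage (not part of the model) --------------------------------
# Minimal sketch of the KML export above; assumes an existing SensitiveArea
# instance `area` with a valid geometry.
kml_doc = area.kml()          # polygon buffered by the species radius, reprojected to WGS84
with open('sensitive_area.kml', 'w') as fh:
    fh.write(kml_doc)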
class User(models.Model):
    profile = models.ForeignKey(Profile, blank=True, null=True, on_delete=models.CASCADE)
class SavedPlace(models.Model):
    """
    Either a db.Location or streets.Block that a user saves a reference to
    so they can easily find it later.
    """
    objects = models.GeoManager()

    user_id = models.IntegerField()
    block_center = models.PointField(
        null=True, blank=True,
        help_text=u'Point representing the center of a related block.')
    location = models.ForeignKey(Location, blank=True, null=True)
    nickname = models.CharField(max_length=128, blank=True)

    def __unicode__(self):
        return u'User %s: %s' % (self.user_id, self.place.pretty_name)

    @property
    def place(self):
        return self._get_block() or self.location

    @property
    def user(self):
        if not hasattr(self, '_user_cache'):
            from ebpub.accounts.models import User
            try:
                self._user_cache = User.objects.get(id=self.user_id)
            except User.DoesNotExist:
                self._user_cache = None
        return self._user_cache

    def pid(self):
        """Place ID as used by ebpub.db.views"""
        from ebpub.utils.view_utils import make_pid
        if self.block_center:
            block = self._get_block()
            return make_pid(block, 8)
        else:
            return make_pid(self.location)

    def clean(self):
        from django.core.exceptions import ValidationError
        error = ValidationError(
            "SavedPlace must have either a Location or a block_center, but not both")
        if self.block_center and self.location_id:
            raise error
        if not (self.block_center or self.location_id):
            raise error

    def _get_block(self):
        if self.block_center is None:
            return None
        # We buffer the center a bit because exact intersection
        # doesn't always get a match.
        from ebpub.utils.mapmath import buffer_by_meters
        geom = buffer_by_meters(self.block_center, BLOCK_FUZZY_DISTANCE_METERS)
        blocks = Block.objects.filter(geom__intersects=geom)
        if not blocks:
            raise Block.DoesNotExist(
                "No block found at lat %s, lon %s" % (self.block_center.y, self.block_center.x))
        # If there's more than one this close, we don't really care.
        return blocks[0]

    block = property(_get_block)

    def name(self):
        if self.location:
            return self.location.pretty_name
        else:
            block = self._get_block()
            if block:
                return u'%s block%s around %s' % (self.radius,
                                                  (self.radius != 1 and 's' or ''),
                                                  block.pretty_name)
            return u'(no name)'
class Image(models.Model):
    jsonld_type = 'ImageObject'

    # Properties from schema.org/Thing
    name = models.CharField(verbose_name=_('Name'), max_length=255, db_index=True, default='')

    data_source = models.ForeignKey(DataSource, related_name='provided_%(class)s_data',
                                    db_index=True, null=True)
    publisher = models.ForeignKey('django_orghierarchy.Organization', verbose_name=_('Publisher'),
                                  db_index=True, null=True, blank=True,
                                  related_name='Published_images')
    created_time = models.DateTimeField(auto_now_add=True)
    last_modified_time = models.DateTimeField(auto_now=True, db_index=True)
    created_by = models.ForeignKey(User, null=True, blank=True,
                                   related_name='EventImage_created_by')
    last_modified_by = models.ForeignKey(User, related_name='EventImage_last_modified_by',
                                         null=True, blank=True)
    image = models.ImageField(upload_to='images', null=True, blank=True)
    url = models.URLField(verbose_name=_('Image'), max_length=400, null=True, blank=True)
    cropping = ImageRatioField('image', '800x800', verbose_name=_('Cropping'))
    license = models.ForeignKey(License, verbose_name=_('License'), related_name='images',
                                default='cc_by')
    photographer_name = models.CharField(verbose_name=_('Photographer name'), max_length=255,
                                         null=True, blank=True)

    def save(self, *args, **kwargs):
        if not self.publisher:
            try:
                self.publisher = self.created_by.get_default_organization()
            except AttributeError:
                pass

        # ensure that either image or url is provided, but not both
        if not self.url and not self.image:
            raise ValidationError(_('You must provide either image or url.'))
        if self.url and self.image:
            raise ValidationError(_('You can only provide image or url, not both.'))

        self.last_modified_time = BaseModel.now()
        super(Image, self).save(*args, **kwargs)

    def is_user_editable(self):
        return self.data_source.user_editable

    def is_user_edited(self):
        return bool(self.data_source.user_editable and self.last_modified_by)

    def can_be_edited_by(self, user):
        """Check if the current image can be edited by the given user"""
        if user.is_superuser:
            return True
        return user.is_admin(self.publisher)
class Photo(models.Model):
    image = models.ImageField(upload_to='photos')
    thumbnail = models.ImageField(upload_to='photos', max_length=500, null=True, blank=True)
    stairid = models.ForeignKey(Stair, null=True, blank=True, related_name='photos',
                                on_delete=models.CASCADE)
    geom = models.PointField(null=True, blank=True)

    def __str__(self):
        return "{} - {} - {}".format(os.path.basename(self.image.url),
                                     "geotagged" if self.geom else "",
                                     self.stairid)

    def image_tag(self):
        return u'<img src="{}" style="height:300px;max-width:100%;"/>'.format(self.image.url)
    image_tag.short_description = 'Image'
    image_tag.allow_tags = True

    def create_thumbnail(self, imagefile):
        # original code for this method came from
        # http://snipt.net/danfreak/generate-thumbnails-in-django-with-pil/

        # If there is no image associated with this object, do not create a thumbnail.
        if not self.image:
            return

        # Set our max thumbnail size in a tuple (max width, max height)
        THUMBNAIL_SIZE = (99, 66)

        # Only JPEG and PNG originals are handled; any other extension leaves
        # PIL_TYPE undefined and raises a NameError below.
        if self.image.name.lower().endswith(".jpg"):
            PIL_TYPE = 'jpeg'
            FILE_EXTENSION = 'jpg'
            DJANGO_TYPE = 'image/jpeg'
        elif self.image.name.lower().endswith(".png"):
            PIL_TYPE = 'png'
            FILE_EXTENSION = 'png'
            DJANGO_TYPE = 'image/png'

        # Open the original photo which we want to thumbnail using PIL's Image
        image = Image.open(StringIO(self.image.read()))

        # We use our PIL Image object to create the thumbnail, which already
        # has a thumbnail() convenience method that constrains proportions.
        # Additionally, we use Image.ANTIALIAS to make the image look better.
        # Without antialiasing, image pattern artifacts may result.
        image.thumbnail(THUMBNAIL_SIZE, Image.ANTIALIAS)

        # Save the thumbnail
        temp_handle = StringIO()
        image.save(temp_handle, PIL_TYPE)
        temp_handle.seek(0)

        # Save the image to a SimpleUploadedFile, which can be saved into an ImageField
        suf = SimpleUploadedFile(os.path.split(self.image.name)[-1],
                                 temp_handle.read(), content_type=DJANGO_TYPE)

        # Save the SimpleUploadedFile into the thumbnail field
        self.thumbnail.save('%s_thumbnail.%s' % (os.path.splitext(suf.name)[0], FILE_EXTENSION),
                            suf, save=False)

    def get_geotags(self):
        tags = exifread.process_file(self.image)
        geotags = {}
        for k, v in tags.iteritems():
            if k.startswith("GPS"):
                geotags[k] = v
        if len(geotags) == 0:
            geotags = False
        return geotags

    def make_geom_from_geotags(self, geotags):
        raw_lat = geotags['GPS GPSLatitude']
        raw_long = geotags['GPS GPSLongitude']
        # Hemisphere references are read here but not applied below, so
        # southern/western coordinates keep a positive sign.
        ew = str(geotags['GPS GPSLongitudeRef'])
        ns = str(geotags['GPS GPSLatitudeRef'])

        # create latitude from tags
        lat_str = str(raw_lat).lstrip('[').rstrip(']')
        lat_list = lat_str.split(",")
        la_deg = int(lat_list[0])
        la_dec_deg_num = float(lat_list[1].split("/")[0].lstrip())
        la_dec_deg_denom = float(lat_list[1].split("/")[1].lstrip())
        la_dec_deg = (la_dec_deg_num / la_dec_deg_denom) / 60
        lat = str(la_deg + la_dec_deg)

        # create longitude from tags
        lon_str = str(raw_long).lstrip('[').rstrip(']')
        lon_list = lon_str.split(",")
        lo_deg = int(lon_list[0])
        lo_dec_deg_num = float(lon_list[1].split("/")[0].lstrip())
        lo_dec_deg_denom = float(lon_list[1].split("/")[1].lstrip())
        lo_dec_deg = (lo_dec_deg_num / lo_dec_deg_denom) / 60
        long = str(lo_deg + lo_dec_deg)

        wkt = "POINT ({} {})".format(long, lat)
        return wkt

    def __unicode__(self):
        return '{"thumbnail": "%s", "image": "%s"}' % (self.thumbnail.url, self.image.url)

    def save(self, *args, **kwargs):
        # MANAGE THUMBNAIL CREATION
        if not self.thumbnail:
            self.create_thumbnail(self.image)
        else:
            fname = os.path.splitext(self.image.url.split("/")[-1])[0]
            tname = os.path.splitext(self.thumbnail.url.split("/")[-1])[0]
            if tname != fname + "_thumbnail":
                self.create_thumbnail(self.image)

        # If the instance has already been saved, it has an id and we set
        # force_update to True to force an UPDATE SQL query when editing the
        # image, avoiding an integrity exception.
        force_update = False
        if self.id:
            force_update = True
        super(Photo, self).save(force_update=force_update)

        # GET AND CREATE GEOM FROM EXIF TAGS
        gts = self.get_geotags()
        if not gts:
            # no geo tags in photo
            return
        necessary_tags = ['GPS GPSLongitude', 'GPS GPSLongitudeRef',
                          'GPS GPSLatitude', 'GPS GPSLatitudeRef']
        for nt in necessary_tags:
            if nt not in gts.keys():
                # not enough geo tags in photo (missing lat/long info)
                return
        location = self.make_geom_from_geotags(gts)
        self.geom = location
        super(Photo, self).save(force_update=force_update)