# -*- coding: utf-8 -*-
"""URL configuration for the elections app: schools geo data, map and results views."""
from django.conf.urls import url
from . import views, data_views
from fancy_cache import cache_page
from django.contrib.auth.decorators import login_required

# Cache rendered responses for 5 minutes.
cached = cache_page(300)

urlpatterns = [
    # NOTE: patterns are raw strings so regex escapes like \d and \. are not
    # mangled by Python string escaping, and literal dots are escaped so they
    # no longer match any character.
    url(r'^escuelas\.geojson$',
        cached(views.LugaresVotacionGeoJSON.as_view()),
        name='geojson'),
    url(r'^escuelas/(?P<pk>\d+)$',
        views.EscuelaDetailView.as_view(),
        name='detalle_escuela'),
    # Map and results require an authenticated user; responses are cached.
    url(r'^mapa/$',
        login_required(cached(views.Mapa.as_view())),
        name='mapa'),
    url(r'^resultados/(?P<pk>\d+)?$',
        login_required(cached(views.ResultadosEleccion.as_view())),
        name='resultados-eleccion'),
    url(r'^resultados-parciales-(?P<slug_eleccion>[\w-]+)\.(?P<filetype>csv|xls)$',
        data_views.resultado_parcial_eleccion,
        name='resultado-parcial-eleccion'),
]
# -*- coding: utf-8 -*-
"""URL configuration for the REST API: mesa endpoints (via router), results and JWT login."""
from django.conf.urls import url
from . import views
from fancy_cache import cache_page
from rest_framework import routers, serializers
from rest_framework_jwt.views import obtain_jwt_token

# Cache rendered responses for one week.
cached = cache_page(3600 * 24 * 7)

# DRF router provides the CRUD routes for the mesa endpoints.
router = routers.DefaultRouter()
router.register('mesa', views.MesaViewSet)
router.register('mesaDocumento', views.MesaDocumentoViewSet)

urlpatterns = [
    url(r'^resultados/', cached(views.Resultados.as_view()), name='resultados'),
    # NOTE(review): pattern is unanchored at the end, so any path starting
    # with "login" matches — confirm this is intentional.
    url(r'^login', obtain_jwt_token),
]
urlpatterns += router.urls
# -*- coding: utf-8 -*-
"""URL configuration exposing only the cached results view."""
from django.conf.urls import url
from . import views
from fancy_cache import cache_page

# Cache rendered responses for one week.
cached = cache_page(3600 * 24 * 7)

urlpatterns = [
    # NOTE: raw string used for consistency with regex-based routes; the
    # pattern is intentionally unanchored at the end so sub-paths also match.
    url(r'^resultados/', cached(views.Resultados.as_view()), name='resultados'),
]
# -*- coding: utf-8 -*- from django.conf.urls import url from . import views from fancy_cache import cache_page cached = cache_page(600) urlpatterns = [ url('^escuelas.geojson$', cached(views.LugaresVotacionGeoJSON.as_view()), name='geojson'), url('^escuelas/(?P<pk>\d+)$', views.EscuelaDetailView.as_view(), name='detalle_escuela'), url('^mapa/$', cached(views.Mapa.as_view()), name='mapa'), # url('^mapa/(?P<elecciones_slug>\w+)/$', cached(views.MapaResultadosOficiales.as_view()), name='mapa-resultados'), url('^mapa/(?P<elecciones_slug>\w+)/(?P<pk>\d+)$', views.ResultadoEscuelaDetailView.as_view()), url('^mapa/(?P<elecciones_slug>\w+)/resultados.geojson$', cached(views.ResultadosOficialesGeoJSON.as_view()), name='resultados-geojson'), url('^resultados/mapa$', cached(views.MapaResultadosOficiales.as_view()), name='resultados-mapa'), #url('^resultados/(?P<tipo>\w+)/(?P<numero>\d+)/(?P<nombre>\w+)$', views.Resultados.as_view(), name='resultados-por'), url('^proyecciones/(?P<eleccion_id>\d+)/$', views.ResultadosProyectadosEleccion.as_view(), name='proyecciones'), url('^resultados/(?P<eleccion_id>\d+)/$',
def vary_on_ArticlesMainView(request): upperhaeding_last_change = UpperHeading.objects.all().values_list( 'change_date', flat=True).latest("change_date") subheading_last_change = SubHeading.objects.all().values_list( 'change_date', flat=True).latest("change_date") timedelta1 = (datetime.datetime.now() - upperhaeding_last_change).seconds > 1 timedelta2 = (datetime.datetime.now() - subheading_last_change).seconds > 1 mark2 = cache.get("mark2") # forms 187 str return "ArticlesMainView+%s+%s+%s+%s" % (mark2, timedelta1, timedelta2, request.user.is_authenticated) # инвалидация в сигналах по урлу и в модели лодки в методе делит и по key_prefix @method_decorator(cache_page(60 * 60 * 24, key_prefix=vary_on_ArticlesMainView), name='dispatch') class ArticlesMainView(TemplateView): template_name = "articles/articles_index.html" """ контроллер показывающий статьи по под-рубрикам + поиск + пагинатор """ def vary_on_paginated_or_not(request): pk = request.get_full_path_info().split("/")[-2] # Смотрим есть ли на странице пагинация или нет по кол-ву выводимых объектов count_eq = Article.objects.filter(foreignkey_to_subheading=int(pk)).count() # время удаления последней статьи try: change_date_of_deleted_article = Article.default.filter(
class ShipmentViewSet(ConfigurableModelViewSet):
    """CRUD API for Shipment objects.

    Creation/update dispatch async jobs to an external engine, so write
    endpoints respond with 202 Accepted and expose the async job id.
    """
    # We need to revisit the ConfigurableGenericViewSet to ensure
    # that it properly allow the inheritance of this attribute
    resource_name = 'Shipment'
    queryset = Shipment.objects.all()
    serializer_class = ShipmentSerializer
    # With profiles enabled, access is restricted to owners/shared users;
    # otherwise the API is open.
    permission_classes = ((HasViewSetActionPermissions,
                           IsOwnerOrShared, ) if settings.PROFILES_ENABLED
                          else (permissions.AllowAny, ))
    filter_backends = (filters.SearchFilter, filters.OrderingFilter,
                       DjangoFilterBackend, )
    filterset_class = ShipmentFilter
    search_fields = SHIPMENT_SEARCH_FIELDS
    ordering_fields = SHIPMENT_ORDERING_FIELDS
    # PUT is deliberately excluded; updates go through PATCH.
    http_method_names = ['get', 'post', 'delete', 'patch']
    # Per-action serializer/permission overrides consumed by the
    # ConfigurableModelViewSet base class.
    configuration = {
        'create': ActionConfiguration(
            request_serializer=ShipmentCreateSerializer,
            response_serializer=ShipmentTxSerializer,
        ),
        'update': ActionConfiguration(request_serializer=ShipmentUpdateSerializer,
                                      response_serializer=ShipmentTxSerializer,
                                      permission_classes=WRITE_PERMISSIONS),
        'retrieve': ActionConfiguration(permission_classes=READ_PERMISSIONS),
        'destroy': ActionConfiguration(permission_classes=DELETE_PERMISSIONS),
    }

    def get_queryset(self):
        """Restrict the queryset to shipments the requester may see.

        Combines (OR) ownership, an optional permission link, wallet-based
        access and access-request grants; detail requests with no permission
        link fall through unfiltered so object permissions decide.
        """
        queryset = self.queryset
        if settings.PROFILES_ENABLED:
            queryset_filter = owner_access_filter(self.request)
            permission_link = self.request.query_params.get('permission_link')
            if permission_link:
                # The validity of the permission link object has already been done by the permission class
                queryset_filter = queryset_filter | Q(
                    pk=PermissionLink.objects.get(
                        pk=permission_link).shipment.pk)
            elif self.detail:
                # Detail lookups rely on object-level permissions instead of
                # queryset filtering.
                return queryset
            else:
                queryset_filter = queryset_filter | shipment_list_wallets_filter(
                    self.request)
                queryset_filter = queryset_filter | Q(
                    pk__in=self.request.user.access_request_shipments)
            queryset = queryset.filter(queryset_filter)
        queryset = self._parse_customer_fields_queries(queryset)
        return queryset

    def _parse_customer_fields_queries(self, queryset):
        """Apply `customer_fields__*` query parameters as extra ORM filters."""
        for key, value in self.request.query_params.items():
            if key.startswith('customer_fields__'):
                queryset = queryset.filter(**{key: parse_value(value)})
        return queryset

    def perform_create(self, serializer):
        """Save a new shipment, stamping owner and hash intervals.

        Hash intervals come from the user's token when profiles are enabled,
        falling back to project defaults.
        """
        if settings.PROFILES_ENABLED:
            background_data_hash_interval = self.request.user.token.get(
                'background_data_hash_interval',
                settings.DEFAULT_BACKGROUND_DATA_HASH_INTERVAL)
            manual_update_hash_interval = self.request.user.token.get(
                'manual_update_hash_interval',
                settings.DEFAULT_MANUAL_UPDATE_HASH_INTERVAL)
            created = serializer.save(
                owner_id=get_owner_id(self.request),
                updated_by=self.request.user.id,
                background_data_hash_interval=background_data_hash_interval,
                manual_update_hash_interval=manual_update_hash_interval)
        else:
            created = serializer.save(
                background_data_hash_interval=settings.DEFAULT_BACKGROUND_DATA_HASH_INTERVAL,
                manual_update_hash_interval=settings.DEFAULT_MANUAL_UPDATE_HASH_INTERVAL)
        return created

    def perform_update(self, serializer):
        """Save an update, recording which user performed it."""
        return serializer.save(updated_by=self.request.user.id)

    def create(self, request, *args, **kwargs):
        """ Create a Shipment object and make Async Request to Engine """
        LOG.debug('Creating a shipment object.')
        log_metric('transmission.info', tags={
            'method': 'shipments.create',
            'module': __name__
        })
        # Create Shipment
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        shipment = self.perform_create(serializer)
        # Saving the shipment may have queued an async engine job; surface
        # its id in the response if so.
        async_job = shipment.asyncjob_set.all()[:1]
        response = self.get_serializer(
            shipment, serialization_type=SerializationType.RESPONSE)
        if async_job:
            LOG.debug(f'AsyncJob created with id {async_job[0].id}.')
            response.instance.async_job_id = async_job[0].id
        # 202: the engine request is processed asynchronously.
        return Response(response.data, status=status.HTTP_202_ACCEPTED)

    @method_decorator(cache_page(60 * 60, remember_all_urls=True)
                      )  # Cache responses for 1 hour
    @action(
        detail=True,
        methods=['get'],
        permission_classes=(IsOwnerOrShared | AccessRequest.permission(
            Endpoints.tracking, PermissionLevel.READ_ONLY), ),
    )
    def tracking(self, request, version, pk):
        """ Retrieve tracking data from db """
        LOG.debug(f'Retrieve tracking data for a shipment {pk}.')
        log_metric('transmission.info', tags={
            'method': 'shipments.tracking',
            'module': __name__
        })
        shipment = self.get_object()
        if hasattr(shipment, 'routeleg'):
            if shipment.state == TransitState.AWAITING_PICKUP:
                # RouteTrackingData may contain data for other shipments already picked up.
                # This shipment should not include those data as it has not yet begun transit.
                tracking_data = RouteTrackingData.objects.none()
            else:
                tracking_data = RouteTrackingData.objects.filter(
                    route__id=shipment.routeleg.route.id)
        else:
            tracking_data = TrackingData.objects.filter(
                shipment__id=shipment.id)
        # Build the JSON:API envelope by hand around the rendered GeoJSON.
        response = Template('{"data": $geojson}')
        response = response.substitute(
            geojson=render_filtered_point_features(shipment, tracking_data))
        return HttpResponse(content=response,
                            content_type='application/vnd.api+json')

    def update(self, request, *args, **kwargs):
        """ Update the shipment with new details, overwriting the built-in method """
        partial = kwargs.pop('partial', False)
        instance = self.get_object()
        LOG.debug(f'Updating shipment {instance.id} with new details.')
        log_metric('transmission.info', tags={
            'method': 'shipments.update',
            'module': __name__
        })
        serializer = self.get_serializer(instance,
                                         data=request.data,
                                         partial=partial)
        serializer.is_valid(raise_exception=True)
        shipment = self.perform_update(serializer)
        # Surface the most recent in-flight async job, if any.
        async_jobs = shipment.asyncjob_set.filter(
            state__in=[JobState.PENDING, JobState.RUNNING])
        response = self.get_serializer(
            shipment, serialization_type=SerializationType.RESPONSE)
        response.instance.async_job_id = async_jobs.latest(
            'created_at').id if async_jobs else None
        # 202: the engine update is processed asynchronously.
        return Response(response.data, status=status.HTTP_202_ACCEPTED)
def vary_on_database(request): cache_key = "BoatListView" data_obj = cache.get(cache_key) if not data_obj and BoatModel.objects.all().exists(): # на случай пустой страницы списка # лодок change_obj = BoatModel.objects.all().values_list("change_date", flat=True).latest( "change_date").timestamp() count_obj = BoatModel.objects.all().count() data_obj = "%s+%s" % (change_obj, count_obj) cache.set(cache_key, data_obj, 60*60*24) return "BoatListView+%s+%s" % (str(data_obj), str(request.user.is_authenticated)) @method_decorator(cache_page(60*60*24, key_prefix=vary_on_database), name="dispatch") class BoatListView(SearchableListMixin, ListView): model = BoatModel template_name = "boats.html" paginate_by = 10 search_fields = ["boat_name", ] def get_ordering(self): """метод возвращает поле по которому идет сортировка!""" self.field = self.request.GET.get('ordering') self.mark = self.request.GET.get("mark") if self.field == '' or self.mark == "": messages.add_message(self.request, messages.WARNING, message="Please choose " "sorting pattern", fail_silently=True) return None if all([self.field, self.mark]):
class CmsPagesViewSet(PagesAPIViewSet):
    """Wagtail pages API with url_path lookup, language activation and
    superuser access to unpublished page revisions via the `rev` parameter.
    """
    base_serializer_class = CmsPageSerializer
    known_query_parameters = BaseAPIViewSet.known_query_parameters.union(
        ['type', 'child_of', 'descendant_of', 'lang', 'rev', 'locale'])
    body_fields = BaseAPIViewSet.body_fields + [
        'title',
    ]
    meta_fields = BaseAPIViewSet.meta_fields + [
        'html_url', 'url_path', 'slug', 'show_in_menus', 'seo_title',
        'search_description', 'first_published_at', 'parent', 'children',
        'locale'
    ]
    listing_default_fields = BaseAPIViewSet.listing_default_fields + [
        'title', 'html_url', 'slug', 'first_published_at', 'url_path'
    ]
    nested_default_fields = BaseAPIViewSet.nested_default_fields + [
        'title',
    ]
    detail_only_fields = ['parent']
    # Pages are addressed by their url_path rather than pk.
    lookup_field = 'url_path'
    lookup_url_kwarg = None

    def _activate_language(self, request):
        """Activate the translation language from `lang` (default 'pl'),
        ignoring codes not configured in settings."""
        lang = request.query_params.get('lang', 'pl')
        if lang in settings.LANGUAGE_CODES:
            activate(lang)

    def is_superuser_rev_request(self):
        """True when a superuser requests a specific page revision (`rev`)."""
        return bool(self.request.user and self.request.user.is_superuser
                    and self.request.query_params.get('rev')
                    and self.kwargs.get('url_path'))

    def get_object(self):
        """Resolve the page for the current url_path.

        Superuser `rev` requests search revision JSON for the url_path and
        return either the latest revision ('latest'/'-1') or the revision
        with the given pk; everyone else gets a normal 404-checked lookup.
        """
        queryset = self.filter_queryset(self.get_queryset())
        # Perform the lookup filtering.
        lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
        assert lookup_url_kwarg in self.kwargs, (
            'Expected view %s to be called with a URL keyword argument '
            'named "%s". Fix your URL conf, or set the `.lookup_field` '
            'attribute on the view correctly.' %
            (self.__class__.__name__, lookup_url_kwarg))
        if self.is_superuser_rev_request():
            rev_id = self.request.query_params.get('rev')
            # Match pages whose serialized revision content contains the
            # lookup field/value pair (string match on the revision JSON).
            query_filter = {
                'revisions__content_json__icontains':
                    '"{}": "{}"'.format(self.lookup_field,
                                        self.kwargs[lookup_url_kwarg])
            }
            obj = queryset.filter(
                **query_filter).order_by('revisions__id').first()
            if not obj:
                raise Http404('No Page matches the given query.')
            if rev_id in ('latest', '-1'):
                obj = obj.get_latest_revision_as_page()
            else:
                try:
                    rev_id = int(rev_id)
                    revision = obj.revisions.get(pk=rev_id)
                    obj = revision.as_page_object()
                except (ValueError, PageRevision.DoesNotExist):
                    raise Http404('No such Page Revision.')
        else:
            filter_kwargs = {self.lookup_field: self.kwargs[lookup_url_kwarg]}
            obj = get_object_or_404(queryset, **filter_kwargs)
            # Return the most specific page subclass, not the base Page.
            obj = obj.specific
        return obj

    @method_decorator(
        cache_page(
            settings.CMS_API_CACHE_TIMEOUT,
            key_prefix='cms-api',
            remember_all_urls=True,
            remember_stats_all_urls=True,
        ))
    @method_decorator(vary_on_cookie)
    def page_view(self, request, url_path=None):
        """Cached, cookie-varying entry point for page detail requests."""
        return self._page_view(request, url_path=url_path)

    def _page_view(self, request, url_path=None):
        """Normalize url_path to '/a/b/' form, resolve the page and serialize it."""
        url_path = '' if not url_path else url_path
        url_parts = [part.strip('/') for part in url_path.split('/') if part]
        self.kwargs['url_path'] = '/{}/'.format('/'.join(url_parts)).replace(
            '//', '/')
        instance = self.get_object()
        self._activate_language(request)
        serializer = self.get_serializer(instance)
        return Response(serializer.data)

    def get_queryset(self):
        """Build the visible page queryset.

        Public pages only; non-superusers see only live pages; scoped to the
        current site's tree. An optional `type` parameter narrows to specific
        page models.
        """
        self._activate_language(self.request)
        request = self.request
        try:
            models = page_models_from_string(
                request.GET.get('type', 'wagtailcore.Page'))
        except (LookupError, ValueError):
            raise BadRequestError("type doesn't exist")
        _qs = Page.objects.all().public()
        if not (request.user and request.user.is_superuser):
            # Only superusers may see unpublished pages.
            _qs = _qs.live()
        if self.request.site:
            _qs = _qs.descendant_of(self.request.site.root_page,
                                    inclusive=True)
        else:
            # No sites configured
            _qs = _qs.none()
        if not models:
            return _qs
        elif len(models) == 1:
            # Query the concrete model so its specific fields are available.
            return models[0].objects.filter(
                id__in=_qs.values_list('id', flat=True))
        else:  # len(models) > 1
            return filter_page_type(_qs, models)

    @classmethod
    def get_urlpatterns(cls):
        """Route both the root path and arbitrary url_paths to page_view."""
        return [
            url(r'^$', cls.as_view({'get': 'page_view'}), name='detail'),
            url(r'^(?P<url_path>.*)/$',
                cls.as_view({'get': 'page_view'}),
                name='detail'),
        ]

    @classmethod
    def get_object_detail_urlpath(cls, model, url_path, namespace=''):
        """Reverse the detail URL for a page's url_path (optionally namespaced)."""
        if namespace:
            url_name = namespace + ':detail'
        else:
            url_name = 'detail'
        url_path = url_path.strip('/')
        result = reverse(url_name, args=(url_path, ))
        return result

    def post(self, request, url_path, **kwargs):
        """Delegate a POST to the specific page's save_post handler; 404 if
        no page matches the url_path."""
        # return Response(status=200)
        try:
            url_path = url_path.strip()
            page = Page.objects.get(url_path='/{}/'.format(url_path))
            return page.specific.save_post(request)
        except Page.DoesNotExist:
            raise NotFound
        # NOTE(review): unreachable — the try block always returns or raises.
        raise MethodNotAllowed(method='POST')
In order to invalidate this cache we use Django signals in rsexamples/signals.py which would delete this cache_key when data gets created/updated or deleted. """ cache_key = "BoatListView" data_obj = cache.get(cache_key) if not data_obj: timestamp = BoatModel.objects.all().values_list( "change_date", flat=True).latest("change_date").timestamp() boats_count = BoatModel.objects.all().count() data_obj = "%s+%s" % (timestamp, boats_count) cache.set(cache_key, data_obj, 60 * 60 * 24) return "BoatListView+%s+%s" % (str(data_obj), str(request.user.is_authenticated)) @method_decorator(cache_page(60 * 60 * 24, key_prefix=vary_on_database), name="dispatch") class BoatListView(ListView): """View shows list of a boats.""" model = BoatModel template_name = "boats.html" paginate_by = 10 @method_decorator(cache_page(60 * 60 * 24), name="dispatch") class BoatListView_2(ListView): """ Similar to view above but uses simple cache invalidation in signals rsexamples/signals.py". Its ok for our example but if you have ,for example, complex SQL or any other code which causes delays , it would invalidate whole page and you would need to get all data and calculations again. It uses URL as cache identification.
class TelemetryViewSet(mixins.ListModelMixin, viewsets.GenericViewSet):
    """List telemetry data for a shipment, optionally aggregated into
    time-bucketed windows via `per` (time truncation) and `aggregate`.
    """
    # With profiles enabled, require owner/shared or a read-only access
    # request; the shipment must exist in either case.
    permission_classes = (
        (ShipmentExists,
         IsOwnerOrShared | AccessRequest.permission(Endpoints.telemetry,
                                                    PermissionLevel.READ_ONLY),
         ) if settings.PROFILES_ENABLED else (permissions.AllowAny,
                                              ShipmentExists, )
    )
    filter_backends = (filters.OrderingFilter, DjangoFilterBackend, )
    filterset_class = TelemetryFilter
    renderer_classes = (JSONRenderer,)

    def _validate_query_parameters(self):
        """Validate `per`, `aggregate`, `before` and `after` query params.

        `per` and `aggregate` must name known TimeTrunc/Aggregates members
        and must be supplied together; `before` must not be later than
        `after` when both are present.
        """
        segment = self.request.query_params.get('per', None)
        aggregate = self.request.query_params.get('aggregate', None)
        before = self.request.query_params.get('before', None)
        after = self.request.query_params.get('after', None)
        if aggregate and aggregate not in Aggregates.__members__:
            raise ValidationError(f'Invalid aggregate supplied should be in: {list(Aggregates.__members__.keys())}')
        if segment and segment not in TimeTrunc.__members__:
            raise ValidationError(f'Invalid time selector supplied, should be in: {list(TimeTrunc.__members__.keys())}')
        if not aggregate and segment:
            raise ValidationError(f'No aggregator supplied with time selector. '
                                  f'Should be in {list(Aggregates.__members__.keys())}')
        if aggregate and not segment:
            raise ValidationError(f'No time selector supplied with aggregation. '
                                  f'Should be in {list(TimeTrunc.__members__.keys())}')
        if before and after and (dateutil.parser.parse(before) > dateutil.parser.parse(after)):
            raise ValidationError(f'Invalid timemismatch applied. '
                                  f'Before timestamp {before} is greater than after: {after}')

    def _truncate_time(self):
        """Return the timestamp-truncation expression selected by `per`."""
        segment = self.request.query_params.get('per')
        return TimeTrunc[segment].value('timestamp')

    def _aggregate_queryset(self, queryset):
        """Group telemetry by sensor/hardware/time-window and aggregate values.

        Returns the queryset unchanged when no `aggregate` param is given.
        """
        aggregate = self.request.query_params.get('aggregate', None)
        if not aggregate:
            return queryset

        method = Aggregates[aggregate].value
        queryset = queryset.annotate(
            window=self._truncate_time()  # Adds a column 'window' that is a truncated timestamp
        ).values(
            'sensor_id', 'hardware_id', 'window'  # Adds a GROUP BY for sensor_id/hardware_id/window
        ).annotate(
            aggregate_value=method('value')  # Calls aggregation function for sensor_id/window group
        ).order_by('window')
        # Clears default ordering, see:
        # https://docs.djangoproject.com/en/2.2/topics/db/aggregation/#interaction-with-default-ordering-or-order-by
        return queryset

    def get_queryset(self):
        """Select route- or shipment-level telemetry bounded by the transit window.

        The window runs from pickup to delivery, with open ends defaulting to
        datetime.min/max (treated as UTC).
        """
        shipment = Shipment.objects.get(pk=self.kwargs['shipment_pk'])
        begin = (shipment.pickup_act or datetime.min).replace(tzinfo=timezone.utc)
        end = (shipment.delivery_act or datetime.max).replace(tzinfo=timezone.utc)
        if hasattr(shipment, 'routeleg'):
            if shipment.state == TransitState.AWAITING_PICKUP:
                # RouteTelemetryData may contain data for other shipments already picked up.
                # This shipment should not include those data as it has not yet begun transit.
                queryset = RouteTelemetryData.objects.none()
            else:
                queryset = RouteTelemetryData.objects.filter(route__id=shipment.routeleg.route.id)
            # Route telemetry uses its own filterset.
            self.filterset_class = RouteTelemetryFilter
        else:
            queryset = TelemetryData.objects.filter(shipment__id=shipment.id)

        return queryset.filter(timestamp__range=(begin, end))

    def get_serializer_class(self):
        """Pick the route/shipment, aggregate/plain serializer variant."""
        shipment = Shipment.objects.get(pk=self.kwargs['shipment_pk'])
        aggregate = self.request.query_params.get('aggregate', None)
        if hasattr(shipment, 'routeleg'):
            return RouteTelemetryResponseAggregateSerializer if aggregate else RouteTelemetryResponseSerializer
        return TelemetryResponseAggregateSerializer if aggregate else TelemetryResponseSerializer

    @method_decorator(cache_page(60 * 60, remember_all_urls=True))  # Cache responses for 1 hour
    def list(self, request, *args, **kwargs):
        """Validate params, filter and (optionally) aggregate, then serialize."""
        self._validate_query_parameters()
        queryset = self.filter_queryset(self.get_queryset())
        queryset = self._aggregate_queryset(queryset)
        serializer = self.get_serializer(queryset, many=True)
        return Response(serializer.data)