def validate_segment_size(self, request=None, size=None):
    """Reject a segment whose size exceeds ``SEGMENT_ALLOWABLE_SIZE``.

    When *size* is not supplied, it is read from the request's
    ``segment_size`` parameter, defaulting to 0 when absent.
    Raises ``SuspiciousOperation`` for oversized segments.
    """
    if size is None:
        size = get_param(request, "segment_size", coerce=int, required=False) or 0
    if SEGMENT_ALLOWABLE_SIZE < size:
        raise SuspiciousOperation("Segment is too large!")
def get_query_set(self):
    """Build the changelist queryset: apply query-string lookups,
    select_related, ordering, and keyword searches.

    Raises SuspiciousOperation for disallowed lookups and
    IncorrectLookupParameters for malformed ones.
    """
    qs = self.root_query_set
    lookup_params = self.params.copy()  # a dictionary of the query string

    # Strip presentation-only parameters that are not ORM lookups.
    for i in (ALL_VAR, ORDER_VAR, ORDER_TYPE_VAR, SEARCH_VAR, IS_POPUP_VAR):
        if i in lookup_params:
            del lookup_params[i]

    # Iterate over a snapshot: the loop mutates lookup_params, and mutating
    # a dict while iterating over it raises RuntimeError on Python 3.
    for key, value in list(lookup_params.items()):
        if not isinstance(key, str):
            # 'key' will be used as a keyword argument later, so Python
            # requires it to be a string. Rebind 'key' so the writes below
            # target the converted key instead of re-inserting the deleted
            # non-string one.
            del lookup_params[key]
            key = smart_str(key)
            lookup_params[key] = value

        # if key ends with __in, split parameter into separate values
        if key.endswith('__in'):
            value = value.split(',')
            lookup_params[key] = value

        # if key ends with __isnull, special case '' and false
        if key.endswith('__isnull'):
            value = value.lower() not in ('', 'false')
            lookup_params[key] = value

        if not self.model_admin.lookup_allowed(key, value):
            raise SuspiciousOperation("Filtering by %s not allowed" % key)

    # Apply lookup parameters from the query string.
    try:
        qs = qs.filter(**lookup_params)
    # Naked except! Because we don't have any other way of validating "params".
    # They might be invalid if the keyword arguments are incorrect, or if the
    # values are not in the correct type, so we might get FieldError, ValueError,
    # ValidationError, or ? from a custom field that raises yet something else
    # when handed impossible data.
    except:
        raise IncorrectLookupParameters

    # Use select_related() if one of the list_display options is a field
    # with a relationship and the provided queryset doesn't already have
    # select_related defined.
    if not qs.query.select_related:
        if self.list_select_related:
            qs = qs.select_related()
        else:
            for field_name in self.list_display:
                try:
                    f = self.lookup_opts.get_field(field_name)
                except models.FieldDoesNotExist:
                    pass
                else:
                    if isinstance(f.rel, models.ManyToOneRel):
                        qs = qs.select_related()
                        break

    # Set ordering.
    if self.order_field:
        qs = qs.order_by('%s%s' % (
            (self.order_type == 'desc' and '-' or ''), self.order_field))

    # Apply keyword searches.
    def construct_search(field_name):
        # Prefix characters select the lookup type (same syntax as
        # ModelAdmin.search_fields).
        if field_name.startswith('^'):
            return "%s__istartswith" % field_name[1:]
        elif field_name.startswith('='):
            return "%s__iexact" % field_name[1:]
        elif field_name.startswith('@'):
            return "%s__search" % field_name[1:]
        else:
            return "%s__icontains" % field_name

    if self.search_fields and self.query:
        for bit in self.query.split():
            or_queries = [
                models.Q(**{construct_search(str(field_name)): bit})
                for field_name in self.search_fields
            ]
            qs = qs.filter(reduce(operator.or_, or_queries))
        # Searching across a relation can produce duplicate rows.
        for field_name in self.search_fields:
            if '__' in field_name:
                qs = qs.distinct()
                break

    return qs
def get_list_queryset(self, queryset):
    """Apply the plugin's query-string filters, list-filter specs and
    keyword search to *queryset*; return the (possibly distinct) result."""
    # Collect filter parameters: strip FILTER_PREFIX from keys, drop empties.
    lookup_params = dict([(smart_str(k)[len(FILTER_PREFIX):], v)
                          for k, v in self.admin_view.params.items()
                          if smart_str(k).startswith(FILTER_PREFIX) and v != ''])
    for p_key, p_val in iteritems(lookup_params):
        if p_val == "False":
            lookup_params[p_key] = False
    use_distinct = False

    # for clean filters
    self.admin_view.has_query_param = bool(lookup_params)
    self.admin_view.clean_query_url = self.admin_view.get_query_string(
        remove=[k for k in self.request.GET.keys()
                if k.startswith(FILTER_PREFIX)])

    # Normalize the types of keys
    if not self.free_query_filter:
        for key, value in lookup_params.items():
            if not self.lookup_allowed(key, value):
                raise SuspiciousOperation(
                    "Filtering by %s not allowed" % key)

    self.filter_specs = []
    if self.list_filter:
        for list_filter in self.list_filter:
            if callable(list_filter):
                # This is simply a custom list filter class.
                spec = list_filter(self.request, lookup_params,
                                   self.model, self)
            else:
                field_path = None
                field_parts = []
                if isinstance(list_filter, (tuple, list)):
                    # This is a custom FieldListFilter class for a given field.
                    field, field_list_filter_class = list_filter
                else:
                    # This is simply a field name, so use the default
                    # FieldListFilter class that has been registered for
                    # the type of the given field.
                    field, field_list_filter_class = list_filter, filter_manager.create
                if not isinstance(field, models.Field):
                    # The filter refers to a (possibly related) field path.
                    field_path = field
                    field_parts = get_fields_from_path(
                        self.model, field_path)
                    field = field_parts[-1]
                spec = field_list_filter_class(
                    field, self.request, lookup_params,
                    self.model, self.admin_view, field_path=field_path)

                if len(field_parts) > 1:
                    # Add related model name to title
                    spec.title = "%s %s" % (field_parts[-2].name, spec.title)

                # Check if we need to use distinct()
                use_distinct = (use_distinct or lookup_needs_distinct(
                    self.opts, field_path))

            if spec and spec.has_output():
                try:
                    # NOTE: 'do_filte' (sic) is the filter-spec API name
                    # used throughout this code base.
                    new_qs = spec.do_filte(queryset)
                except ValidationError as e:
                    new_qs = None
                    self.admin_view.message_user(
                        _("<b>Filtering error:</b> %s") % e.messages[0],
                        'error')
                if new_qs is not None:
                    queryset = new_qs

                self.filter_specs.append(spec)

    self.has_filters = bool(self.filter_specs)
    self.admin_view.filter_specs = self.filter_specs
    # filter() is a lazy iterator on Python 3; materialize before len().
    obj = filter(lambda f: f.is_used, self.filter_specs)
    if six.PY3:
        obj = list(obj)
    self.admin_view.used_filter_num = len(obj)

    try:
        for key, value in lookup_params.items():
            use_distinct = (use_distinct
                            or lookup_needs_distinct(self.opts, key))
    except FieldDoesNotExist as e:
        raise IncorrectLookupParameters(e)

    try:
        # fix a bug by david: In demo, quick filter by IDC Name() cannot be used.
        if isinstance(queryset, models.query.QuerySet) and lookup_params:
            new_lookup_parames = dict()
            for k, v in lookup_params.items():
                list_v = v.split(',')
                # NOTE(review): str.split always returns at least one
                # element, so the else branch is unreachable — confirm.
                if len(list_v) > 0:
                    new_lookup_parames.update({k: list_v})
                else:
                    new_lookup_parames.update({k: v})
            queryset = queryset.filter(**new_lookup_parames)
    except (SuspiciousOperation, ImproperlyConfigured):
        raise
    except Exception as e:
        raise IncorrectLookupParameters(e)
    else:
        if not isinstance(queryset, models.query.QuerySet):
            pass

    query = self.request.GET.get(SEARCH_VAR, '')

    # Apply keyword searches.
    def construct_search(field_name):
        # Same prefix syntax as Django admin search_fields.
        if field_name.startswith('^'):
            return "%s__istartswith" % field_name[1:]
        elif field_name.startswith('='):
            return "%s__iexact" % field_name[1:]
        elif field_name.startswith('@'):
            return "%s__search" % field_name[1:]
        else:
            return "%s__icontains" % field_name

    if self.search_fields and query:
        orm_lookups = [construct_search(str(search_field))
                       for search_field in self.search_fields]
        for bit in query.split():
            or_queries = [models.Q(**{orm_lookup: bit})
                          for orm_lookup in orm_lookups]
            queryset = queryset.filter(reduce(operator.or_, or_queries))
        if not use_distinct:
            for search_spec in orm_lookups:
                if lookup_needs_distinct(self.opts, search_spec):
                    use_distinct = True
                    break
    self.admin_view.search_query = query

    if use_distinct:
        return queryset.distinct()
    else:
        return queryset
def edit_participation(pctx, participation_id):
    # type: (CoursePageContext, int) -> http.HttpResponse
    """Create or edit a course Participation, including the enrollment
    approve/deny/drop actions posted from the edit form."""

    if not pctx.has_permission(pperm.edit_participation):
        raise PermissionDenied()

    request = pctx.request

    num_participation_id = int(participation_id)

    if num_participation_id == -1:
        # Sentinel id used by the "create new participation" URL.
        participation = Participation(
            course=pctx.course,
            status=participation_status.active)
        add_new = True
    else:
        participation = get_object_or_404(Participation,
                                          id=num_participation_id)
        add_new = False

    # Guard against editing a participation that belongs to another course.
    if participation.course.id != pctx.course.id:
        raise SuspiciousOperation(
            "may not edit participation in different course")

    if request.method == 'POST':
        form = EditParticipationForm(
            add_new, pctx, request.POST, instance=participation)
        reset_form = False
        try:
            if form.is_valid():
                if "submit" in request.POST:
                    form.save()
                    messages.add_message(request, messages.SUCCESS,
                                         _("Changes saved."))
                elif "approve" in request.POST:
                    send_enrollment_decision(participation, True,
                                             pctx.request)

                    # FIXME: Double-saving
                    participation = form.save()
                    participation.status = participation_status.active
                    participation.save()
                    reset_form = True

                    messages.add_message(request, messages.SUCCESS,
                                         _("Successfully enrolled."))
                elif "deny" in request.POST:
                    send_enrollment_decision(participation, False,
                                             pctx.request)

                    # FIXME: Double-saving
                    participation = form.save()
                    participation.status = participation_status.denied
                    participation.save()
                    reset_form = True

                    messages.add_message(request, messages.SUCCESS,
                                         _("Successfully denied."))
                elif "drop" in request.POST:
                    # FIXME: Double-saving
                    participation = form.save()
                    participation.status = participation_status.dropped
                    participation.save()
                    reset_form = True

                    messages.add_message(request, messages.SUCCESS,
                                         _("Successfully dropped."))
        except IntegrityError as e:
            messages.add_message(
                request, messages.ERROR,
                _("A data integrity issue was detected when saving "
                  "this participation. Maybe a participation for "
                  "this user already exists? (%s)")
                % str(e))

        if reset_form:
            # Rebuild the form from the re-saved instance so the status
            # change made above is reflected in the rendered form.
            form = EditParticipationForm(
                add_new, pctx, instance=participation)
    else:
        form = EditParticipationForm(add_new, pctx, instance=participation)

    return render_course_page(pctx, "course/generic-course-form.html", {
        "form_description": _("Edit Participation"),
        "form": form
    })
def authenticate(self, nonce, request):
    """Authenticates users in case of the OpenID Connect Authorization
    code flow.

    Returns the local Django user on success, or ``None`` when the
    request cannot be authenticated. Raises ``SuspiciousOperation``
    when the callback lacks the state value or authorization code.
    """
    # NOTE: the request object is mandatory to perform the authentication using an authorization
    # code provided by the OIDC supplier.
    if (nonce is None and oidc_rp_settings.USE_NONCE) or request is None:
        return

    # Fetches required GET parameters from the HTTP request object.
    state = request.GET.get('state')
    code = request.GET.get('code')

    # Don't go further if the state value or the authorization code is not present in the GET
    # parameters because we won't be able to get a valid token for the user in that case.
    if state is None or code is None:
        raise SuspiciousOperation(
            'Authorization code or state value is missing')

    # Prepares the token payload that will be used to request an authentication token to the
    # token endpoint of the OIDC provider.
    token_payload = {
        'client_id': oidc_rp_settings.CLIENT_ID,
        'client_secret': oidc_rp_settings.CLIENT_SECRET,
        'grant_type': 'authorization_code',
        'code': code,
        'redirect_uri': request.build_absolute_uri(
            reverse('oidc_auth_callback')),
    }

    # Calls the token endpoint.
    token_response = requests.post(
        oidc_rp_settings.PROVIDER_TOKEN_ENDPOINT, data=token_payload)
    token_response.raise_for_status()
    token_response_data = token_response.json()

    # Validates the token.
    raw_id_token = token_response_data.get('id_token')
    id_token = validate_and_return_id_token(raw_id_token, nonce)
    if id_token is None:
        return

    # Retrieves the access token and refresh token.
    access_token = token_response_data.get('access_token')
    refresh_token = token_response_data.get('refresh_token')

    # Stores the ID token, the related access token and the refresh token in the session.
    request.session['oidc_auth_id_token'] = raw_id_token
    request.session['oidc_auth_access_token'] = access_token
    request.session['oidc_auth_refresh_token'] = refresh_token

    # If the id_token contains userinfo scopes and claims we don't have to hit the userinfo
    # endpoint.
    if oidc_rp_settings.ID_TOKEN_INCLUDE_USERINFO:
        userinfo_data = id_token
    else:
        # Fetches the user information from the userinfo endpoint provided by the OP.
        userinfo_response = requests.get(
            oidc_rp_settings.PROVIDER_USERINFO_ENDPOINT,
            headers={'Authorization': 'Bearer {0}'.format(access_token)})
        userinfo_response.raise_for_status()
        userinfo_data = userinfo_response.json()

    # The e-mail address is mandatory for most applications so we return immediately if we
    # cannot find it in the user's claims.
    if 'email' not in userinfo_data:
        return

    # Tries to retrieve a corresponding user in the local database and creates it if applicable.
    try:
        oidc_user = OIDCUser.objects.select_related('user').get(
            sub=userinfo_data.get('sub'))
    except OIDCUser.DoesNotExist:
        oidc_user = create_oidc_user_from_claims(userinfo_data)
        oidc_user_created.send(sender=self.__class__, request=request,
                               oidc_user=oidc_user)
    else:
        update_oidc_user_from_claims(oidc_user, userinfo_data)

    # Runs a custom user details handler if applicable. Such handler could be responsible for
    # creating / updating whatever is necessary to manage the considered user (eg. a profile).
    user_details_handler = import_string(oidc_rp_settings.USER_DETAILS_HANDLER) \
        if oidc_rp_settings.USER_DETAILS_HANDLER is not None else None
    if user_details_handler is not None:
        user_details_handler(oidc_user, userinfo_data)

    return oidc_user.user
def clean(self):
    """Validate the form; refuse deletion of a protected course type."""
    super().clean()
    marked_for_deletion = self.cleaned_data.get("DELETE")
    if marked_for_deletion and not self.instance.can_be_deleted_by_manager:
        raise SuspiciousOperation("Deleting course type not allowed")
def _path(self, name):
    """Resolve *name* inside the storage location, refusing traversal."""
    cleaned = _clean_name_dance(name)
    try:
        return safe_join(self.location, cleaned)
    except ValueError:
        # safe_join() raises ValueError when the joined path would escape
        # self.location; surface that as a SuspiciousOperation.
        raise SuspiciousOperation("Attempted access to '%s' denied." % cleaned)
def get_enroll_secret(self, data):
    """Return the mandatory enroll secret from an osquery enroll payload."""
    if "enroll_secret" not in data:
        raise SuspiciousOperation(
            "Missing enroll_secret key in osquery enroll request")
    return data["enroll_secret"]
def point(request):
    # Signed server-to-server endpoint: read and optionally adjust a
    # user's point balance on behalf of a registered service.
    if request.method != 'POST':
        raise SuspiciousOperation('INVALID_METHOD')

    # check_sign() validates the request signature and returns the calling
    # service together with the requested parameters.
    service, [sid, delta, message, lower_bound], timestamp = check_sign(
        request.POST,
        ['sid', 'delta', 'message', 'lower_bound'],
    )
    m = ServiceMap.objects.filter(sid=sid, service=service).first()
    if not m:
        raise SuspiciousOperation('INVALID_CALL')

    try:
        # Empty/absent values mean 0 (pure balance read).
        delta = 0 if not delta else int(delta)
        lower_bound = 0 if not lower_bound else int(lower_bound)
    except:  # any conversion failure means malformed input
        raise SuspiciousOperation('INVALID_TYPE')

    # A balance change must always carry a human-readable reason.
    if delta != 0 and not message:
        raise SuspiciousOperation('INVALID_MESSAGE')

    profile = m.user.profile
    is_test_app = service.scope == 'TEST'
    # Test-scoped services operate on a separate sandbox balance.
    point = profile.point_test if is_test_app else profile.point
    modified = False
    # Apply the delta only when the current balance meets the caller's
    # lower bound (e.g. "charge only if the user has enough points").
    if delta and point >= lower_bound:
        if is_test_app:
            profile.point_test += delta
        else:
            profile.point += delta
        profile.save()
        point += delta
        modified = True

    # Keep only the most recent ~20 point logs per user.
    manager = m.user.point_logs
    if manager.count() >= 20:
        manager.order_by('time')[0].delete()

    logger.info(
        'point', {
            'r': request,
            'uid': m.user.username,
            'hide': True,
            'extra': [
                ('app', service.name),
                ('delta', delta),
            ],
        })
    PointLog(user=m.user, service=service, delta=delta,
             point=profile.point, action=message).save()

    return HttpResponse(
        json.dumps({
            'point': point,
            'modified': modified
        }),
        content_type='application/json',
    )
def obtener_count_agente(self):
    """Return call counts grouped by agente_id, ordered by agente_id."""
    try:
        grouped = self.values('agente_id')
        return grouped.annotate(
            cantidad=Count('agente_id')).order_by('agente_id')
    except LlamadaLog.DoesNotExist:
        raise SuspiciousOperation(_("No se encontraron llamadas "))
def get_columns_name_for_describable_content_type(request, content_type_name):
    """
    According to a specified model, retrieve any layouts of descriptors, and
    from them, return information about their related descriptor model types.

    Additionally, if the model offers a get_defaults_columns method, the
    returned object will contain information returned by this method for the
    model's standard fields. For example, for a model offering a name field,
    information about this column can be defined like a descriptor in a dict.

    Results are cached (per content type, layout list and mode) for one day.

    :param request:
    :param content_type_name: Module.model content type name.
    :return: An array of dict defining all available columns related to this model.
    """
    app_label, model_name = content_type_name.split('.')
    content_type = get_object_or_404(
        ContentType, app_label=app_label, model=model_name)

    layouts_list = request.GET.get('layouts')
    # define the context, for example: in "search" mode some columns are not display
    mode = request.GET.get('mode')

    layouts_ids = None

    if layouts_list:
        if type(layouts_list) is not str:
            raise SuspiciousOperation(
                _('Invalid layout list parameter format'))

        layouts_ids = [int(x) for x in layouts_list.split(',')]
        # Sorted ids make the cache key stable regardless of request order.
        layouts_ids.sort()

        if mode == 'search':
            cache_name = cache_manager.make_cache_name(
                content_type_name, ','.join(map(str, layouts_ids)), mode)
        else:
            cache_name = cache_manager.make_cache_name(
                content_type_name, ','.join(map(str, layouts_ids)))
    else:
        if mode == 'search':
            cache_name = cache_manager.make_cache_name(content_type_name, mode)
        else:
            cache_name = cache_manager.make_cache_name(content_type_name)

    # Serve from cache when available.
    results = cache_manager.get('entity_columns', cache_name)

    if results is not None:
        return HttpResponseRest(request, results)

    layouts = Layout.objects.filter(target=content_type)
    if layouts_ids:
        layouts = layouts.filter(pk__in=layouts_ids)

    columns = {}

    # add descriptor model type information for each descriptors attached to any
    # layout related to the entity model
    for layout in layouts:
        if layout.layout_content.get('panels'):
            for panel in layout.layout_content.get('panels'):
                for descriptor in panel.get('descriptors'):
                    descriptor = Descriptor.objects.get(
                        name=descriptor.get('name'))
                    dft = DescriptorFormatTypeManager.get(descriptor.format)
                    query = True if dft.related_model(
                        descriptor.format) else False

                    # display_fields comes by default from dft is defined
                    if dft.display_fields is not None and 'display_fields' not in descriptor.format:
                        descriptor.format[
                            'display_fields'] = dft.display_fields

                    # Only expose the column in the requested context.
                    if (dft.column_display is True and not mode) or (dft.search_display is True and mode == 'search'):
                        columns['#' + descriptor.code] = {
                            'id': descriptor.id,
                            'group_name': descriptor.group_name,
                            'label': descriptor.get_label(),
                            'query': query,
                            'format': descriptor.format,
                            'available_operators': dft.available_operators
                        }

    # and add standard columns information if the models defines a get_default_columns method
    model_class = content_type.model_class()
    if hasattr(model_class, 'get_defaults_columns'):
        for name, column in model_class.get_defaults_columns().items():
            descriptor_format = column.get('format')
            descriptor_group_name = column.get('group_name', None)

            # get group id and type id from descriptor type code
            code = column.get('code')
            if code:
                # NOTE(review): 'descriptor' is fetched here but not used
                # below — confirm whether this lookup is intentional.
                descriptor = Descriptor.objects.get(code=code)

            if (column.get('column_display', True) and not mode) or (column.get('search_display', True) and mode == 'search'):
                columns[name] = {
                    'group_name': descriptor_group_name,
                    'field': column.get('field', None),
                    'label': column.get('label', name),
                    'query': column.get('query', False),
                    'format': descriptor_format,
                    'available_operators': column.get('available_operators')
                }

    results = {'cacheable': True, 'validity': None, 'columns': columns}

    # cache for 1 day
    cache_manager.set('entity_columns', cache_name, results, 60 * 60 * 24)

    return HttpResponseRest(request, results)
def post(self, request):
    """Handle a chunked-upload POST.

    Two modes, selected by the presence of "index":
      * segment upload: store/replace one numbered segment of the upload;
      * finalize: materialize the assembled upload, verify its digest and
        hand back a one-time secret.
    """
    index = request.POST.get("index", "")
    filename = request.POST.get("filename", "")
    algorithm = request.POST.get("algorithm", "")
    digest = request.POST.get("digest", "")

    self.validate_algorithm(algorithm)

    upload, created = Upload.objects.get_or_create(
        defaults={"filename": filename}, **get_upload_lookups(request))
    upload.full_clean()

    if index:
        # --- segment-upload branch ---
        if upload.file:
            # The upload was already assembled; no more segments accepted.
            raise StateConflictError('already materialized')
        self.validate_segment_count(count=upload.segments.count())

        uploaded_file = request.FILES["file"]
        self.validate_segment_size(size=uploaded_file.size)

        segment = UploadSegment.objects.get_or_create(upload=upload,
                                                      index=index)[0]
        segment.attempt_count += 1
        # Cap retries per segment to resist abuse.
        if getattr(settings, 'UPLOADS_SEGMENT_MAX_ATTEMPT_COUNT', 3) < segment.attempt_count:
            raise SuspiciousOperation(
                "Segment has been uploaded too many times!")

        replace_file = True
        if segment.file:
            if digest:
                try:
                    self.validate_digest(digest, segment.get_digest,
                                         algorithm=algorithm)
                except ValidationError:
                    # Stale/corrupt stored segment: drop it, accept the new file.
                    segment.file.delete(save=False)
                except FileNotFoundError:
                    logger.warning(
                        'Encountered situation where segment %s file did not exist for upload %s when it should. Proceeding with file replacement.',
                        segment.pk, upload.pk)
                else:
                    # The stored file already matches the digest; keep it.
                    replace_file = False

        if replace_file:
            # NOTE(review): the 'filename' kwarg below is not referenced by
            # this format string — '(unknown)' looks like a mangled
            # '{filename}' placeholder. Confirm against upstream.
            name = '{upload}-{segment}-{index}-{attempt}-(unknown)'.format(
                upload=upload.pk,
                segment=segment.pk,
                index=segment.index,
                attempt=segment.attempt_count,
                filename=filename,
            )
            segment.file.save(name, uploaded_file, save=False)
            try:
                segment.full_clean()
            except ValidationError:
                # Don't keep the stored bytes for an invalid segment row.
                segment.file.delete(save=False)
                raise
            segment.save()
            if digest:
                # Re-read the stored file before verifying what we wrote.
                segment.refresh_from_db(fields=['file'])
                self.validate_digest(digest, segment.get_digest,
                                     algorithm=algorithm)
    else:
        # --- finalize branch ---
        if created:
            raise SuspiciousOperation(
                "Upload cannot be created and finalized in same request!")
        if not upload.file:
            try:
                result = upload.materialize(algorithm=algorithm)
            except RedisLockError:
                logger.exception(
                    'Unable to obtain lock for materialization of upload %s',
                    upload.pk)
            else:
                if result:
                    # A receiver may redirect the client elsewhere.
                    for receiver, url in result:
                        if url:
                            return HttpResponse(url, status=300)
        if upload.file:
            try:
                self.validate_digest(digest, upload.digest)
            except ValidationError:
                try:
                    upload.delete()
                except models.ProtectedError:
                    # If we get here, the user has previously uploaded this file successfully (probably with a different client)
                    # and should be actively working with it, so we cannot just clear the other secrets. This would be an
                    # unlikely occurrence that we do not expect to actually happen. If we do get here, it is likely that the
                    # clients are using different digest algorithms with the same upload identifier. This is currently not
                    # supported. So we are catching the exception and logging it. If this becomes a common occurrence, we could
                    # add an additional step for calculating the digest and storing it separately from the upload instance. As of
                    # now, that is unnecessary and the client is to be blamed for malfunctioning. They should include the algorithm
                    # used to compute the digest in their upload identifier string.
                    message = 'Failed to cleanup after digest mismatch for protected upload %s' % upload.pk
                    logger.exception(message)
                    raise StateConflictError(message)
                raise
            secret = UploadSecret.objects.create(upload=upload)
            return HttpResponse(secret.value)
    return HttpResponse('')
def validate_algorithm(self, algorithm):
    """Reject digest algorithms that are not registered in ``hasher_map``.

    An empty/absent algorithm is allowed (callers treat it as "no digest").
    Raises ``SuspiciousOperation`` for unknown algorithms.
    """
    # Membership test on the mapping itself; '.keys()' was redundant.
    if algorithm and algorithm not in hasher_map:
        raise SuspiciousOperation("Unsupported algorithm!")
def validate_total_size(self, request=None, size=None):
    """Reject uploads whose declared total size exceeds the global cap.

    When *size* is not supplied, it is read from the request's
    ``total_size`` parameter, defaulting to 0 when absent. Raises
    ``SuspiciousOperation`` above ``SEGMENT_ALLOWABLE_SIZE * SEGMENT_LIMIT``.
    """
    if size is None:
        size = get_param(request, "total_size", coerce=int, required=False) or 0
    if SEGMENT_ALLOWABLE_SIZE * SEGMENT_LIMIT < size:
        raise SuspiciousOperation("File is too large!")
def exportMeasurementData(req):
    """
    Returns the measurement data from a list of reservoirs in Excel
    compatible format (csv or xls).

    Expects GET parameters: format ('csv'|'xls'), dateFrom, dateUntil
    (both '%Y-%m-%d') and reservoirs ('all' or a comma-separated id list).
    Raises FieldError on missing parameters and SuspiciousOperation when
    the data cannot be read.
    """
    availableFormats = ['csv', 'xls']
    if not 'format' in req.GET or \
            not req.GET['format'] in availableFormats or \
            not 'dateFrom' in req.GET or \
            not 'dateUntil' in req.GET or \
            not 'reservoirs' in req.GET:
        print('[exportMeasurementData] missing parameters')
        raise FieldError('Pedido Inválido')

    # names that will be shown in the header of Excel table
    dataColumns = ['Reservatório', 'Data e Hora', 'Nivel de Água', 'pH',
                   'Condutividade', 'Salinidade', 'TDS']
    # the attributes to export
    attrKeys = ['reservoir__res_id', 'dateTime', 'waterLevel', 'pH',
                'conductivity', 'salinity', 'tds']

    try:
        dtFrom = timezone.datetime.strptime(req.GET['dateFrom'], '%Y-%m-%d')
        dtUntil = timezone.datetime.strptime(req.GET['dateUntil'], '%Y-%m-%d')
        data = Measurement.objects \
            .filter(dateTime__gte=dtFrom, dateTime__lte=dtUntil) \
            .values(*attrKeys)
        # BUG FIX: restrict to specific reservoirs only when a concrete list
        # was requested; previously '' slipped through and, worse, the
        # filtered queryset was discarded because QuerySet.filter() returns
        # a new queryset rather than mutating in place.
        if req.GET['reservoirs'] not in ('all', ''):
            data = data.filter(
                reservoir__res_id__in=req.GET['reservoirs'].split(','))
        # change dates to string values (affects the cached dicts used by
        # the xls branch; the csv branch re-reads via values_list below)
        for value in data:
            value['dateTime'] = value['dateTime'].strftime('%d-%m-%Y %H:%M')
    except Exception as e:
        print('[exportMeasurementData] failed to read data: {}'.format(e))
        raise SuspiciousOperation('Não foi possivel ler os dados')

    if req.GET['format'] == 'csv':
        response = HttpResponse(content_type='text/csv')
        response[
            'Content-Disposition'] = 'attachement; filename="exported_data.csv"'
        writer = csv.writer(response)
        writer.writerow(dataColumns)
        dataAsValuesList = data.values_list(*attrKeys)
        for values in dataAsValuesList:
            writer.writerow(values)
        return response
    else:  # format == 'xls'
        response = HttpResponse(content_type='text/ms-excel')
        response[
            'Content-Disposition'] = 'attachement; filename="exported_data.xls"'
        writer = xlwt.Workbook(encoding='utf-8')
        sheet = writer.add_sheet('Medições')
        row = 0
        headerFont = xlwt.XFStyle()
        # BUG FIX: the bold flag was read, not set ('headerFont.font.bold'
        # was a no-op expression); assign True to embolden the header.
        headerFont.font.bold = True
        # write the table header
        for col in range(len(dataColumns)):
            sheet.write(row, col, dataColumns[col], headerFont)
        bodyFont = xlwt.XFStyle()
        # write the body
        for values in data:
            row += 1
            for col in range(len(values)):
                sheet.write(row, col, values[attrKeys[col]], bodyFont)
        writer.save(response)
        return response
def token_require(request):
    # OAuth-style login entry point: issues a one-time access token for a
    # registered service and redirects back to its login callback URL.
    client_id = request.GET.get('client_id', '')
    state = request.GET.get('state', '')

    service = Service.objects.filter(name=client_id).first()
    if not service:
        raise SuspiciousOperation('INVALID_SERVICE')
    # A short state would be trivially guessable; require >= 8 chars.
    if len(state) < 8:
        raise SuspiciousOperation('INVALID_STATE')

    user = request.user
    profile = user.profile
    flags = user.profile.flags

    # Access policy; non-zero reason codes are rendered by denied.html.
    reason = 0
    if flags['sysop']:
        reason = 1
    elif service.scope == 'SPARCS' and not flags['sparcs']:
        reason = 2
    elif service.scope == 'TEST' and not flags['test']:
        reason = 3
    elif service.scope != 'TEST' and flags['test-only']:
        reason = 4
    elif not (profile.email_authed or profile.facebook_id
              or profile.twitter_id or profile.kaist_id):
        reason = 5

    if reason:
        return render(request, 'api/denied.html', {
            'reason': reason,
            'alias': service.alias,
        })

    # Invalidate any previously issued (unconsumed) token.
    AccessToken.objects.filter(user=user, service=service).delete()

    m = ServiceMap.objects.filter(user=user, service=service).first()
    if not m or m.unregister_time:
        # First visit, or the user unregistered earlier: (re-)register.
        m_new = service_register(user, service)
        log_msg = 'success' if m_new else 'fail'
        logger.warning(
            f'register.{log_msg}', {
                'r': request,
                'extra': [
                    ('app', service.name),
                    ('sid', m_new.sid if m_new else ''),
                ],
            })
        if not m_new:
            # Re-registration blocked by the cooldown window.
            # NOTE(review): if m is None (never registered) while
            # registration fails, m.unregister_time raises AttributeError —
            # confirm service_register() can only fail for users inside the
            # unregister cooltime.
            left = service.cooltime - (timezone.now() - m.unregister_time).days
            return render(request, 'api/cooltime.html', {
                'service': service,
                'left': left,
            })

    # Generate a collision-free one-time token id.
    while True:
        tokenid = token_hex(10)
        if not AccessToken.objects.filter(tokenid=tokenid).count():
            break

    token = AccessToken(
        tokenid=tokenid,
        user=user,
        service=service,
        expire_time=timezone.now() + timedelta(seconds=TIMEOUT),
    )
    token.save()

    logger.info('login.try', {
        'r': request,
        'hide': True,
        'extra': [('app', client_id)],
    })
    return redirect(service.login_callback_url + '?' + urlencode({
        'code': token.tokenid,
        'state': state,
    }))
def get_context_data(self, **kwargs): '''Hooks into request lifecycle and processes the order''' # Get cart from the current session cart = Cart(self.request.session) if not cart.products: raise SuspiciousOperation("Cart is empty") # Create order for each author orders = [] for product in cart.products: # Save new instance of product to the database product.order_copy = True # Backup reference to images from photoseries if isinstance(product, Photoseries): original_images = product.images.all() product.pk = None product.save() # Set reference to images again if isinstance(product, Photoseries): product.images.set(original_images) product.save() # Check if order for seller exists existing_order = [ order for order in orders if order.seller == product.owner ] if len(existing_order) != 1: order = Order() order.buyer = auth.get_user(self.request) order.seller = product.owner orders.append(order) order.save() else: order = existing_order[0] # Put product inside order if isinstance(product, Photo): order.photos.add(product) else: order.photoseries.add(product) order.save() # 4. Versende für jede Order zwei Mails # 4.1 Bestätigungsmail buyer = User.objects.get(id=order.buyer_id) send_mail( 'Order Confirmation', 'Your order was confirmed. The owner will contact you for the payment.', settings.EMAIL_HOST_USER, [buyer.email], fail_silently=True, auth_user=settings.EMAIL_HOST_USER, auth_password=settings.EMAIL_HOST_PASSWORD) # 4.2 Mail mit Info für den Verkäufer send_mail( 'New Order', 'You got a new order. Please login to accept the incoming order', settings.EMAIL_HOST_USER, [order.seller.email], fail_silently=True, auth_user=settings.EMAIL_HOST_USER, auth_password=settings.EMAIL_HOST_PASSWORD) # Show success message to user context = super(checkoutView, self).get_context_data(**kwargs) context['orders'] = orders cart.clear() return context
def _new_func(request, user_id, *args, **kwargs):
    # Guard: the URL's user_id must match the authenticated user before
    # delegating to the wrapped view ('func' from the enclosing decorator).
    # NOTE(review): assumes user_id arrives as an int (e.g. <int:user_id>
    # in the URLconf); a string would never compare equal — confirm against
    # the URL pattern.
    if request.user.id != user_id:
        raise SuspiciousOperation("UserID doesn't match logged in user.")
    return func(request, user_id, *args, **kwargs)
def search(request):
    """Forum search view: dispatches on the 'action' GET parameter and
    renders matching posts or topics (selected via 'show_as')."""
    # TODO: used forms in every search type
    def _render_search_form(form=None):
        return render(request, 'djangobb_forum/search_form.html', {
            'categories': Category.objects.all(),
            'form': form,
        })

    if not 'action' in request.GET:
        return _render_search_form(form=PostSearchForm())

    if request.GET.get("show_as") == "posts":
        show_as_posts = True
        template_name = 'djangobb_forum/search_posts.html'
    else:
        show_as_posts = False
        template_name = 'djangobb_forum/search_topics.html'

    context = {}

    # Create 'user viewable' pre-filtered topics/posts querysets
    viewable_category = Category.objects.all()
    topics = Topic.objects.all().order_by("-last_post__created")
    posts = Post.objects.all().order_by('-created')

    user = request.user
    if not user.is_superuser:
        user_groups = user.groups.all() or [
        ]  # need 'or []' for anonymous user otherwise: 'EmptyManager' object is not iterable
        viewable_category = viewable_category.filter(
            Q(groups__in=user_groups) | Q(groups__isnull=True))

        topics = Topic.objects.filter(forum__category__in=viewable_category)
        posts = Post.objects.filter(
            topic__forum__category__in=viewable_category)

    base_url = None
    # When True, the generic post/topic filtering at the bottom applies.
    _generic_context = True

    action = request.GET['action']
    if action == 'show_24h':
        # Everything created or updated within the last day.
        date = timezone.now() - timedelta(days=1)
        if show_as_posts:
            context["posts"] = posts.filter(
                Q(created__gte=date) | Q(updated__gte=date))
        else:
            context["topics"] = topics.filter(
                Q(last_post__created__gte=date)
                | Q(last_post__updated__gte=date))
        _generic_context = False
    elif action == 'show_new':
        if not user.is_authenticated:
            raise Http404(
                "Search 'show_new' not available for anonymous user.")
        try:
            last_read = PostTracking.objects.get(user=user).last_read
        except PostTracking.DoesNotExist:
            last_read = None

        if last_read:
            if show_as_posts:
                context["posts"] = posts.filter(
                    Q(created__gte=last_read) | Q(updated__gte=last_read))
            else:
                context["topics"] = topics.filter(
                    Q(last_post__created__gte=last_read)
                    | Q(last_post__updated__gte=last_read))
            _generic_context = False
        else:
            #searching more than forum_settings.SEARCH_PAGE_SIZE in this way - not good idea :]
            topics_id = [
                topic.id
                for topic in topics[:forum_settings.SEARCH_PAGE_SIZE]
                if forum_extras.has_unreads(topic, user)
            ]
            topics = Topic.objects.filter(
                id__in=topics_id)  # to create QuerySet
    elif action == 'show_unanswered':
        topics = topics.filter(post_count=1)
    elif action == 'show_subscriptions':
        topics = topics.filter(subscribers__id=user.id)
    elif action == 'show_user':
        # Show all posts from user or topics started by user
        if not user.is_authenticated:
            raise Http404(
                "Search 'show_user' not available for anonymous user.")

        user_id = request.GET.get("user_id", user.id)
        try:
            user_id = int(user_id)
        except ValueError:
            raise SuspiciousOperation()

        if user_id != user.id:
            try:
                search_user = User.objects.get(id=user_id)
            except User.DoesNotExist:
                messages.error(request, _("Error: User unknown!"))
                return HttpResponseRedirect(request.path)
            messages.info(
                request,
                _("Filter by user '%(username)s'.") %
                {'username': search_user.username})

        if show_as_posts:
            posts = posts.filter(user__id=user_id)
        else:
            # show as topic
            topics = topics.filter(posts__user__id=user_id).order_by(
                "-last_post__created").distinct()

        base_url = "?action=show_user&user_id=%s&show_as=" % user_id
    elif action == 'search':
        form = PostSearchForm(request.GET)
        if not form.is_valid():
            return _render_search_form(form)

        keywords = form.cleaned_data['keywords']
        author = form.cleaned_data['author']
        forum = form.cleaned_data['forum']
        search_in = form.cleaned_data['search_in']
        sort_by = form.cleaned_data['sort_by']
        sort_dir = form.cleaned_data['sort_dir']

        # Full-text search runs through the haystack index, not the ORM.
        query = SearchQuerySet().models(Post)

        if author:
            query = query.filter(author__username=author)

        if forum != '0':
            query = query.filter(forum__id=forum)

        if keywords:
            if search_in == 'all':
                query = query.filter(SQ(topic=keywords) | SQ(text=keywords))
            elif search_in == 'message':
                query = query.filter(text=keywords)
            elif search_in == 'topic':
                query = query.filter(topic=keywords)

        order = {
            '0': 'created',
            '1': 'author',
            '2': 'topic',
            '3': 'forum'
        }.get(sort_by, 'created')
        if sort_dir == 'DESC':
            order = '-' + order

        post_pks = query.values_list("pk", flat=True)

        if not show_as_posts:
            # TODO: We have here a problem to get a list of topics without double entries.
            # Maybe we must add a search index over topics?
            # Info: If whoosh backend used, setup HAYSTACK_ITERATOR_LOAD_PER_QUERY
            # to a higher number to speed up
            context["topics"] = topics.filter(posts__in=post_pks).distinct()
        else:
            # FIXME: How to use the pre-filtered query from above?
            posts = posts.filter(pk__in=post_pks).order_by(order)
            context["posts"] = posts

        get_query_dict = request.GET.copy()
        get_query_dict.pop("show_as")
        base_url = "?%s&show_as=" % get_query_dict.urlencode()
        _generic_context = False

    if _generic_context:
        if show_as_posts:
            context["posts"] = posts.filter(
                topic__in=topics).order_by('-created')
        else:
            context["topics"] = topics

    if base_url is None:
        base_url = "?action=%s&show_as=" % action

    if show_as_posts:
        context['posts_page'] = get_page(context['posts'], request,
                                         forum_settings.SEARCH_PAGE_SIZE)
        context["as_topic_url"] = base_url + "topics"
        post_count = context["posts"].count()
        messages.success(request, _("Found %i posts.") % post_count)
    else:
        context['topics_page'] = get_page(context['topics'], request,
                                          forum_settings.SEARCH_PAGE_SIZE)
        context["as_post_url"] = base_url + "posts"
        topic_count = context["topics"].count()
        messages.success(request, _("Found %i topics.") % topic_count)

    return render(request, template_name, context)
def match_columns(request, import_log_id):
    """View to match import spreadsheet columns with database fields.

    GET renders a formset pairing each spreadsheet column with a model field.
    POST validates the matches (including the update key for update/override
    imports) and redirects to the relation-matching step on success.

    Raises SuspiciousOperation when a non-superuser tries to view another
    user's import.
    """
    import_log = get_object_or_404(ImportLog, id=import_log_id)
    if not request.user.is_superuser and import_log.user != request.user:
        raise SuspiciousOperation(
            "Non superuser attempting to view other users import")

    # need to generate matches if they don't exist already
    existing_matches = import_log.get_matches()
    MatchFormSet = inlineformset_factory(ImportSetting,
                                         ColumnMatch,
                                         form=MatchForm,
                                         extra=0)

    import_data = import_log.get_import_file_as_list()
    header_row = [x.lower() for x in import_data[0]]  # make all lower
    sample_row = import_data[1]
    errors = []

    model_class = import_log.import_setting.content_type.model_class()
    field_names = model_class._meta.get_all_field_names()
    # We can't add a new AutoField and specify its value, so drop AutoFields
    # for "N" (new records) imports.
    # BUG FIX: the original called field_names.remove() while iterating over
    # field_names, which skips the element following each removal; build a
    # filtered list instead.
    if import_log.import_type == "N":
        field_names = [
            field_name for field_name in field_names
            if not isinstance(
                model_class._meta.get_field_by_name(field_name)[0], AutoField)
        ]

    if request.method == 'POST':
        formset = MatchFormSet(request.POST,
                               instance=import_log.import_setting)
        if formset.is_valid():
            formset.save()
            if import_log.import_type in ["U", "O"]:
                # Update/override imports need a unique key column to link
                # incoming rows to existing records.
                update_key = request.POST.get('update_key', '')
                if update_key:
                    field_name = import_log.import_setting.columnmatch_set.get(
                        column_name=update_key).field_name
                    if field_name:
                        field_object, model, direct, m2m = \
                            model_class._meta.get_field_by_name(field_name)
                        if direct and field_object.unique:
                            import_log.update_key = update_key
                            import_log.save()
                        else:
                            errors += [
                                'Update key must be unique. Please select a unique field.'
                            ]
                    else:
                        errors += ['Update key must matched with a column.']
                else:
                    errors += [
                        'Please select an update key. This key is used to linked records for updating.'
                    ]
            errors += validate_match_columns(import_log, model_class,
                                             header_row)
            # Reject duplicate field selections across columns.
            all_field_names = []
            for clean_data in formset.cleaned_data:
                if clean_data['field_name']:
                    if clean_data['field_name'] in all_field_names:
                        errors += [
                            "{0} is duplicated.".format(
                                clean_data['field_name'])
                        ]
                    all_field_names += [clean_data['field_name']]
            if not errors:
                return HttpResponseRedirect(
                    reverse(match_relations,
                            kwargs={'import_log_id': import_log.id}))
    else:
        formset = MatchFormSet(instance=import_log.import_setting,
                               queryset=existing_matches)

    # Build the choice list shown in each match form's <select>.
    field_choices = (('', 'Do Not Use'), )
    for field_name in field_names:
        field_object, model, direct, m2m = \
            model_class._meta.get_field_by_name(field_name)
        add = True
        if direct:
            field_verbose = field_object.verbose_name
        else:
            field_verbose = field_name
        if direct and not field_object.blank:
            field_verbose += " (Required)"
        if direct and field_object.unique:
            field_verbose += " (Unique)"
        if m2m or isinstance(field_object, ForeignKey):
            field_verbose += " (Related)"
        elif not direct:
            # Reverse relations that are not m2m/FK cannot be imported into.
            add = False
        if add:
            field_choices += ((field_name, field_verbose), )

    # Include django-custom-field support
    custom_fields = get_custom_fields_from_model(model_class)
    if custom_fields:
        for custom_field in custom_fields:
            field_choices += (
                ("simple_import_custom__{0}".format(custom_field),
                 "{0} (Custom)".format(custom_field)), )

    # Include defined methods
    # Model must have a simple_import_methods defined
    if hasattr(model_class, 'simple_import_methods'):
        for import_method in model_class.simple_import_methods:
            field_choices += (
                ("simple_import_method__{0}".format(import_method),
                 "{0} (Method)".format(import_method)), )
    # User model should allow set password
    if issubclass(model_class, User):
        field_choices += (("simple_import_method__{0}".format('set_password'),
                           "Set Password (Method)"), )

    for i, form in enumerate(formset):
        form.fields['field_name'].widget = forms.Select(
            choices=field_choices)
        form.sample = sample_row[i]

    return render_to_response(
        'simple_import/match_columns.html',
        {
            'import_log': import_log,
            'formset': formset,
            'errors': errors
        },
        RequestContext(request, {}),
    )
def sign_in_by_email(request):
    """Passwordless sign-in: email the user a one-time sign-in link.

    On POST with a valid form, gets-or-creates the User by email, refreshes
    the sign-in key for existing users, emails a tokenized link, and
    redirects home.  On GET, renders an empty form.

    Raises SuspiciousOperation when the deployment is not configured to use
    this view.
    """
    # Guard: refuse to serve this endpoint unless the settings explicitly
    # select it as the student sign-in view.
    if settings.STUDENT_SIGN_IN_VIEW != "course.auth.sign_in_by_email":
        raise SuspiciousOperation("email-based sign-in is not being used")

    if request.method == 'POST':
        form = SignInByEmailForm(request.POST)
        if form.is_valid():
            from django.contrib.auth.models import User
            email = form.cleaned_data["email"]
            # Case-insensitive lookup; the username doubles as the email
            # for auto-created accounts.
            user, created = User.objects.get_or_create(email__iexact=email,
                                                       defaults=dict(
                                                           username=email,
                                                           email=email))
            if created:
                # Account exists only for link-based sign-in; no password.
                user.set_unusable_password()
                user.save()
            ustatus, ustatus_created = UserStatus.objects.get_or_create(
                user=user,
                defaults=dict(status=user_status.unconfirmed,
                              sign_in_key=make_sign_in_key(user)))
            # NOTE(review): this intentionally keys off `created` (the User),
            # not `ustatus_created` — an existing user always gets a fresh
            # sign-in key, invalidating previously emailed links.
            # `ustatus_created` itself is never read.
            if not created:
                ustatus.sign_in_key = make_sign_in_key(user)
                ustatus.save()

            from django.template.loader import render_to_string
            message = render_to_string(
                "course/sign-in-email.txt", {
                    "user":
                    user,
                    "sign_in_uri":
                    request.build_absolute_uri(
                        reverse("course.auth.sign_in_stage2_with_token",
                                args=(
                                    user.id,
                                    ustatus.sign_in_key,
                                ))),
                    "home_uri":
                    request.build_absolute_uri(reverse("course.views.home"))
                })
            from django.core.mail import send_mail
            send_mail("Your RELATE sign-in link",
                      message,
                      settings.ROBOT_EMAIL_FROM,
                      recipient_list=[email])

            messages.add_message(
                request, messages.INFO,
                "Email sent. Please check your email and click the link.")
            return redirect("course.views.home")
    else:
        form = SignInByEmailForm()

    return render(request, "course/login-by-email.html", {
        "form_description": "",
        "form": form
    })
def post(self, request, attribute_mapping=None, create_unknown_user=None):
    """ SAML Authorization Response endpoint.

    Parses and validates the IdP's SAMLResponse, authenticates the remote
    user via the configured attribute mapping, logs them in, and redirects
    to the (validated) RelayState or a custom redirect URL.  All parse
    failures are funneled into handle_acs_failure().
    """
    if 'SAMLResponse' not in request.POST:
        logger.warning('Missing "SAMLResponse" parameter in POST data.')
        return HttpResponseBadRequest(
            'Missing "SAMLResponse" parameter in POST data.')

    # Fall back to project settings when the caller did not inject values.
    attribute_mapping = attribute_mapping or get_custom_setting(
        'SAML_ATTRIBUTE_MAPPING', {'uid': ('username', )})
    create_unknown_user = create_unknown_user or get_custom_setting(
        'SAML_CREATE_UNKNOWN_USER', True)
    conf = self.get_sp_config(request)

    identity_cache = IdentityCache(request.saml_session)
    client = Saml2Client(conf, identity_cache=identity_cache)
    oq_cache = OutstandingQueriesCache(request.saml_session)
    oq_cache.sync()
    outstanding_queries = oq_cache.outstanding_queries()

    # Every failure mode is recorded in _exception so the response below is
    # uniform; logging level varies with how suspicious the failure is.
    _exception = None
    try:
        response = client.parse_authn_request_response(
            request.POST['SAMLResponse'], saml2.BINDING_HTTP_POST,
            outstanding_queries)
    except (StatusError, ToEarly) as e:
        _exception = e
        logger.exception("Error processing SAML Assertion.")
    except ResponseLifetimeExceed as e:
        _exception = e
        logger.info(("SAML Assertion is no longer valid. Possibly caused "
                     "by network delay or replay attack."), exc_info=True)
    except SignatureError as e:
        _exception = e
        logger.info("Invalid or malformed SAML Assertion.", exc_info=True)
    except StatusAuthnFailed as e:
        _exception = e
        logger.info("Authentication denied for user by IdP.", exc_info=True)
    except StatusRequestDenied as e:
        _exception = e
        logger.warning("Authentication interrupted at IdP.", exc_info=True)
    except StatusNoAuthnContext as e:
        _exception = e
        logger.warning("Missing Authentication Context from IdP.",
                       exc_info=True)
    except MissingKey as e:
        _exception = e
        logger.exception(
            "SAML Identity Provider is not configured correctly: certificate key is missing!"
        )
    except UnsolicitedResponse as e:
        _exception = e
        logger.exception(
            "Received SAMLResponse when no request has been made.")
    except RequestVersionTooLow as e:
        _exception = e
        logger.exception(
            "Received SAMLResponse have a deprecated SAML2 VERSION.")
    except Exception as e:
        # Catch-all so an unexpected parser error still produces a clean
        # ACS failure page instead of a 500.
        _exception = e
        logger.exception("SAMLResponse Error")

    if _exception:
        return self.handle_acs_failure(request, exception=_exception)
    elif response is None:
        logger.warning("Invalid SAML Assertion received (unknown error).")
        return self.handle_acs_failure(
            request,
            status=400,
            exception=SuspiciousOperation('Unknown SAML2 error'))

    # Hook for subclasses to impose extra checks on the parsed response.
    try:
        self.custom_validation(response)
    except Exception as e:
        logger.warning(f"SAML Response validation error: {e}")
        return self.handle_acs_failure(
            request,
            status=400,
            exception=SuspiciousOperation('SAML2 validation error'))

    # The outstanding query has been answered; drop it from the cache.
    session_id = response.session_id()
    oq_cache.delete(session_id)

    # authenticate the remote user
    session_info = response.session_info()

    # assertion_info: capture the bearer confirmation's expiry so the
    # authentication backend can enforce assertion lifetime.
    assertion = response.assertion
    assertion_info = {}
    for sc in assertion.subject.subject_confirmation:
        if sc.method == SCM_BEARER:
            assertion_not_on_or_after = sc.subject_confirmation_data.not_on_or_after
            assertion_info = {
                'assertion_id': assertion.id,
                'not_on_or_after': assertion_not_on_or_after
            }
            break

    # Both settings may be callables that produce the actual value.
    if callable(attribute_mapping):
        attribute_mapping = attribute_mapping()
    if callable(create_unknown_user):
        create_unknown_user = create_unknown_user()

    logger.debug('Trying to authenticate the user. Session info: %s',
                 session_info)
    user = auth.authenticate(request=request,
                             session_info=session_info,
                             attribute_mapping=attribute_mapping,
                             create_unknown_user=create_unknown_user,
                             assertion_info=assertion_info)
    if user is None:
        logger.warning(
            "Could not authenticate user received in SAML Assertion. Session info: %s",
            session_info)
        return self.handle_acs_failure(
            request,
            exception=PermissionDenied('No user could be authenticated.'),
            session_info=session_info)

    auth.login(self.request, user)
    _set_subject_id(request.saml_session, session_info['name_id'])
    logger.debug("User %s authenticated via SSO.", user)

    # Post-login extension points for subclasses.
    self.post_login_hook(request, user, session_info)
    self.customize_session(user, session_info)

    relay_state = self.build_relay_state()
    custom_redirect_url = self.custom_redirect(user, relay_state,
                                               session_info)
    if custom_redirect_url:
        return HttpResponseRedirect(custom_redirect_url)

    # Open-redirect protection: only follow a RelayState that passes
    # referral-URL validation.
    relay_state = validate_referral_url(request, relay_state)
    logger.debug('Redirecting to the RelayState: %s', relay_state)
    return HttpResponseRedirect(relay_state)
def post(self, *args, **kwargs):
    """Delegate to the parent wizard POST handler.

    A ValidationError from the wizard's ManagementForm means the hidden
    form-state fields were lost or tampered with, which we surface as a
    security error rather than a form error.
    """
    parent = super(TwoFactorSetupView, self)
    try:
        return parent.post(*args, **kwargs)
    except ValidationError:
        raise SuspiciousOperation(
            "ManagementForm data is missing or has been tampered.")
def process_response(self, request: HttpRequest,
                     response: HttpResponse) -> HttpResponse:
    """Session-saving middleware hook with subdomain-aware cookies.

    Renders an error page for unknown realm subdomains, then saves the
    session (if needed) and sets/deletes the session cookie, scoping the
    cookie domain to the request's host so it varies per subdomain.
    """
    try:
        request.get_host()
    except DisallowedHost:
        # If we get a DisallowedHost exception trying to access
        # the host, (1) the request is failed anyway and so the
        # below code will do nothing, and (2) the below will
        # trigger a recursive exception, breaking things, so we
        # just return here.
        return response

    # Realm check is skipped for static assets and API/JSON endpoints,
    # which handle invalid realms themselves.
    if (not request.path.startswith("/static/")
            and not request.path.startswith("/api/")
            and not request.path.startswith("/json/")):
        subdomain = get_subdomain(request)
        if subdomain != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
            realm = get_realm(subdomain)
            if (realm is None):
                return render(request,
                              "zerver/invalid_realm.html",
                              status=404)
    """
        If request.session was modified, or if the configuration is to save the
        session every time, save the changes and set a session cookie or delete
        the session cookie if the session has been emptied.
    """
    try:
        accessed = request.session.accessed
        modified = request.session.modified
        empty = request.session.is_empty()
    except AttributeError:
        # No session attached to this request; nothing to persist.
        pass
    else:
        # First check if we need to delete this cookie.
        # The session should be deleted only if the session is entirely empty
        if settings.SESSION_COOKIE_NAME in request.COOKIES and empty:
            response.delete_cookie(
                settings.SESSION_COOKIE_NAME,
                path=settings.SESSION_COOKIE_PATH,
                domain=settings.SESSION_COOKIE_DOMAIN,
            )
        else:
            if accessed:
                # Cookie-dependent content must vary for caches.
                patch_vary_headers(response, ('Cookie', ))
            if (modified
                    or settings.SESSION_SAVE_EVERY_REQUEST) and not empty:
                if request.session.get_expire_at_browser_close():
                    # Session cookie (no explicit expiry).
                    max_age = None
                    expires = None
                else:
                    max_age = request.session.get_expiry_age()
                    expires_time = time.time() + max_age
                    expires = cookie_date(expires_time)
                # Save the session data and refresh the client cookie.
                # Skip session save for 500 responses, refs #3881.
                if response.status_code != 500:
                    try:
                        request.session.save()
                    except UpdateError:
                        raise SuspiciousOperation(
                            "The request's session was deleted before the "
                            "request completed. The user may have logged "
                            "out in a concurrent request, for example.")
                    host = request.get_host().split(':')[0]

                    # The subdomains feature overrides the
                    # SESSION_COOKIE_DOMAIN setting, since the setting
                    # is a fixed value and with subdomains enabled,
                    # the session cookie domain has to vary with the
                    # subdomain.
                    session_cookie_domain = host
                    response.set_cookie(
                        settings.SESSION_COOKIE_NAME,
                        request.session.session_key,
                        max_age=max_age,
                        expires=expires,
                        domain=session_cookie_domain,
                        path=settings.SESSION_COOKIE_PATH,
                        secure=settings.SESSION_COOKIE_SECURE or None,
                        httponly=settings.SESSION_COOKIE_HTTPONLY or None,
                    )
    return response
def revert(request):
    """AJAX endpoint: revert a Document to an earlier reversion Version.

    Expects POST params 'id' (version pk) and 'url_title' (document).
    Rebuilds the document from the version's field_dict (handling
    polymorphic ctype, parent-pointer, FK, and M2M fields) and saves it
    inside a new revision.  Returns the 'versions' URL for the document.

    Raises Http404 for non-AJAX/non-POST access, SuspiciousOperation for
    non-revertible documents or unknown version ids.
    """
    if not request.is_ajax() or not request.POST:
        raise Http404
    version_id = request.POST['id']
    document_url_title = request.POST['url_title']
    document = get_object_or_404(Document, url_title=document_url_title)
    check_permissions(document, request.user,
                      [document.edit_permission_name])
    versions = Version.objects.get_for_object(document)
    if not document.can_be_reverted:
        raise SuspiciousOperation('This Document can not be reverted!')

    # find the version we want to revert to
    revert_version = None
    for version in versions:
        if version.pk == int(version_id):
            revert_version = version
            break
    if revert_version is None:
        # user supplied version_id that does not exist
        raise SuspiciousOperation('Could not find document')

    revert_version.revision.revert(delete=False)
    fields = revert_version.field_dict
    # The concrete polymorphic class is recovered from the stored ctype id
    # (popped so it is not passed to the model constructor below).
    document_class = ContentType.objects.get_for_id(
        fields.pop('polymorphic_ctype_id')).model_class()

    # Remove all references to parent objects, rename ForeignKeyFields, extract ManyToManyFields.
    new_fields = fields.copy()
    many_to_many_fields = {}
    for key in fields.keys():
        if "_ptr" in key:
            # Parent-link pointers must not be set on the new instance.
            del new_fields[key]
            continue
        try:
            field = getattr(document_class, key).field
        except AttributeError:
            # No descriptor with a .field for this key; leave it as-is.
            continue
        if isinstance(field, models.ManyToManyField):
            # M2M values cannot be constructor kwargs; apply after save.
            many_to_many_fields[key] = fields[key]
        else:
            # NOTE(review): for a plain (non-FK) field, field.attname == key,
            # so this would overwrite and then delete the same entry —
            # presumably only FK descriptors reach this branch (attname is
            # key + "_id"); confirm against the reversion/Django versions
            # in use.
            new_fields[field.attname] = fields[key]
            del new_fields[key]

    reverted_document = document_class(**new_fields)
    with transaction.atomic(), revisions.create_revision():
        reverted_document.save()
        # Restore ManyToManyFields
        for key in many_to_many_fields.keys():
            getattr(reverted_document, key).clear()
            getattr(reverted_document, key).add(*many_to_many_fields[key])
        revisions.set_user(request.user)
        revisions.set_comment(
            _('reverted to revision \"{revision_comment}\" (at {date})'.format(
                revision_comment=revert_version.revision.get_comment(),
                date=datetime.utcnow().strftime("%Y-%m-%d %H:%M"),
            )))
    return HttpResponse(
        reverse('versions', args=[reverted_document.url_title]))
def get_import_file_content_or_raise(user_id, import_type):
    """Return the raw bytes of a previously generated import file.

    Raises SuspiciousOperation when no such file exists, i.e. no test run
    was performed beforehand.
    """
    path = generate_import_filename(user_id, import_type)
    if not os.path.isfile(path):
        raise SuspiciousOperation("No test run performed previously.")
    with open(path, "rb") as handle:
        content = handle.read()
    return content
def get_filters(self, request):
    """Build the list filter specs for the admin changelist.

    Returns a 4-tuple (filter_specs, has_filters, lookup_params,
    use_distinct).  Raises SuspiciousOperation for disallowed lookups and
    IncorrectLookupParameters when a remaining query-string parameter does
    not map to a real field.
    """
    lookup_params = self.params.copy()  # a dictionary of the query string
    use_distinct = False

    # Remove all the parameters that are globally and systematically
    # ignored.
    for ignored in IGNORED_PARAMS:
        if ignored in lookup_params:
            del lookup_params[ignored]

    # Normalize the types of keys.
    # BUG FIX: iterate over a snapshot — the original deleted and inserted
    # keys while iterating lookup_params.items(), which raises RuntimeError
    # on Python 3.
    for key, value in list(lookup_params.items()):
        if not isinstance(key, str):
            # 'key' will be used as a keyword argument later, so Python
            # requires it to be a string.
            del lookup_params[key]
            lookup_params[smart_str(key)] = value

        if not self.model_admin.lookup_allowed(key, value):
            raise SuspiciousOperation("Filtering by %s not allowed" % key)

    filter_specs = []
    if self.list_filter:
        for list_filter in self.list_filter:
            if callable(list_filter):
                # This is simply a custom list filter class.
                spec = list_filter(request, lookup_params, self.model,
                                   self.model_admin)
            else:
                field_path = None
                if isinstance(list_filter, (tuple, list)):
                    # This is a custom FieldListFilter class for a given field.
                    field, field_list_filter_class = list_filter
                else:
                    # This is simply a field name, so use the default
                    # FieldListFilter class that has been registered for
                    # the type of the given field.
                    field, field_list_filter_class = list_filter, FieldListFilter.create
                if not isinstance(field, models.Field):
                    field_path = field
                    field = get_fields_from_path(self.model, field_path)[-1]
                spec = field_list_filter_class(field,
                                               request,
                                               lookup_params,
                                               self.model,
                                               self.model_admin,
                                               field_path=field_path)
                # Check if we need to use distinct()
                use_distinct = (use_distinct or lookup_needs_distinct(
                    self.lookup_opts, field_path))
            if spec and spec.has_output():
                filter_specs.append(spec)

    # At this point, all the parameters used by the various ListFilters
    # have been removed from lookup_params, which now only contains other
    # parameters passed via the query string. We now loop through the
    # remaining parameters both to ensure that all the parameters are valid
    # fields and to determine if at least one of them needs distinct(). If
    # the lookup parameters aren't real fields, then bail out.
    try:
        for key, value in lookup_params.items():
            lookup_params[key] = prepare_lookup_value(key, value)
            use_distinct = (use_distinct or lookup_needs_distinct(
                self.lookup_opts, key))
        return filter_specs, bool(
            filter_specs), lookup_params, use_distinct
    # BUG FIX: 'except FieldDoesNotExist, e' is Python-2-only syntax (a
    # SyntaxError on Python 3); 'as' works on both.
    except FieldDoesNotExist as e:
        raise IncorrectLookupParameters(e)
def clean(self):
    """Reject submissions where the phone number was altered.

    The field is prefilled; any mismatch between the submitted value and
    the field's initial value is treated as tampering.
    """
    super().clean()
    submitted = self.cleaned_data.get('phone_number')
    expected = self.fields['phone_number'].initial
    if submitted != expected:
        raise SuspiciousOperation('Phone number changed!!!')
def test_suspicious_operation_in_build_absolute_uri(self, build_absolute_uri):
    """When building the absolute URI raises SuspiciousOperation, the HTTP
    interface must be omitted from the collected data."""
    request = make_request()
    build_absolute_uri.side_effect = SuspiciousOperation()

    data = self.raven.get_data_from_request(request)

    build_absolute_uri.assert_called_once_with()
    assert 'sentry.interfaces.Http' not in data
def validate_segment_count(self, request=None, count=None):
    """Ensure an upload's segment count does not exceed SEGMENT_LIMIT.

    When no explicit count is given, the optional 'count' request
    parameter is used (defaulting to 0 when absent); get_param is only
    consulted in that case, matching the original conditional expression.

    Raises SuspiciousOperation when the count exceeds the limit.
    """
    if count is None:
        count = get_param(request, "count", coerce=int,
                          required=False) or 0
    if SEGMENT_LIMIT < count:
        raise SuspiciousOperation("Upload has too many segments!")