def new_function(self, request, *args, **kwargs):
    """ Wrapped function """
    now = time.time()
    key = 'ovs_api_limit_{0}.{1}_{2}'.format(
        f.__module__, f.__name__, request.META['HTTP_X_REAL_IP'])
    client = VolatileFactory.get_client()
    mutex = VolatileMutex(key)
    try:
        mutex.acquire()
        rate_info = client.get(key, {'calls': [], 'timeout': None})
        active_timeout = rate_info['timeout']
        if active_timeout is not None:
            if active_timeout > now:
                raise Throttled(wait=active_timeout - now)
            else:
                rate_info['timeout'] = None
        rate_info['calls'] = [call for call in rate_info['calls']
                              if call > (now - per)] + [now]
        calls = len(rate_info['calls'])
        if calls > amount:
            rate_info['timeout'] = now + timeout
            client.set(key, rate_info)
            raise Throttled(wait=timeout)
        client.set(key, rate_info)
    finally:
        mutex.release()
    return f(self, request, *args, **kwargs)
def new_function(*args, **kwargs):
    """ Wrapped function """
    request = _find_request(args)
    now = time.time()
    key = 'ovs_api_limit_{0}.{1}_{2}'.format(
        f.__module__, f.__name__, request.META['HTTP_X_REAL_IP'])
    client = VolatileFactory.get_client()
    with volatile_mutex(key):
        rate_info = client.get(key, {'calls': [], 'timeout': None})
        active_timeout = rate_info['timeout']
        if active_timeout is not None:
            if active_timeout > now:
                logger.warning('Call {0} is being throttled with a wait of {1}'.format(
                    key, active_timeout - now))
                raise Throttled(wait=active_timeout - now)
            else:
                rate_info['timeout'] = None
        rate_info['calls'] = [call for call in rate_info['calls']
                              if call > (now - per)] + [now]
        calls = len(rate_info['calls'])
        if calls > amount:
            rate_info['timeout'] = now + timeout
            client.set(key, rate_info)
            logger.warning('Call {0} is being throttled with a wait of {1}'.format(
                key, timeout))
            raise Throttled(wait=timeout)
        client.set(key, rate_info)
    return f(*args, **kwargs)
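# The two wrappers above close over ``f``, ``amount``, ``per`` and ``timeout``, which are
# supplied by an enclosing decorator factory that is not shown here. A minimal sketch of
# such a factory follows; the name ``limit`` and its exact signature are assumptions, not
# the original code.
import functools

def limit(amount, per, timeout):
    """Allow at most ``amount`` calls per ``per`` seconds, backing off for ``timeout`` seconds."""
    def decorator(f):
        @functools.wraps(f)
        def new_function(self, request, *args, **kwargs):
            # ... throttling logic as in the wrapped functions above ...
            return f(self, request, *args, **kwargs)
        return new_function
    return decorator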
def login_view(request):
    if "timeout" in request.session:
        wait = ceil(request.session["timeout"] - time.time())
        if wait > 0:
            raise Throttled(detail=f"AUTH.THROTTLED.{wait}")
    username = request.data.get("username", None)
    password = request.data.get("password", None)
    if username is None or password is None:
        raise AuthenticationFailed("AUTH.CREDENTIALS_NONEXISTENT")
    user = authenticate(username=username, password=password)
    if user is None:
        request.session["attempts"] = request.session.get("attempts", 0) + 1
        if request.session["attempts"] >= 10:
            # exponential timeout for each failed login attempt past the 10th
            timeout = (1 << (request.session["attempts"] - 10)) * 60
            request.session["timeout"] = time.time() + timeout
            raise Throttled(detail=f"AUTH.THROTTLED.{timeout}")
        raise AuthenticationFailed("AUTH.CREDENTIALS_INVALID")
    login(request, user)
    if "attempts" in request.session:
        del request.session["attempts"]
    if "timeout" in request.session:
        del request.session["timeout"]
    return Response(UserSerializer(user).data)
def create(self, request, *args, **kwargs):
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    user = self.get_user()
    existing_password = request.data['existing_password']
    new_password = request.data['new_password']

    # It has been more than 1 hour since the last invalid attempt to change the password.
    # Reset the counter for invalid attempts.
    if throttle_period_expired(user.change_password_last_attempt,
                               settings.TIME_RESET_CHANGE_PASSWORD_ATTEMPTS):
        user.reset_old_password_invalid_attempts()

    # There have been more than 3 failed attempts and the throttle hasn't expired.
    if user.old_password_invalid_attempts >= settings.INCORRECT_PASSWORD_ATTEMPTS_ALLOWED \
            and not throttle_period_expired(user.change_password_last_attempt,
                                            settings.CHANGE_PASSWORD_THROTTLE):
        time_since_throttle = (
            timezone.now() - user.change_password_last_attempt.replace(tzinfo=pytz.utc)
        ).total_seconds()
        wait_time = settings.CHANGE_PASSWORD_THROTTLE - time_since_throttle
        raise Throttled(wait=wait_time)

    try:
        # The new password is passed twice for confirmation because validation is done on the front-end.
        user.change_password(existing_password, new_password, new_password)
    except ChangePasswordError as error:
        # A response object must be returned instead of raising an exception to avoid rolling back
        # the transaction and losing the incremented count of failed password attempts.
        user.save()
        return JsonResponse(
            {'errors': [{'detail': message} for message in error.messages]},
            status=400,
            content_type='application/vnd.api+json; application/json',
        )

    user.save()
    remove_sessions_for_user(user)
    return Response(status=status.HTTP_204_NO_CONTENT)
def has_task_completed(dispatched_task):
    """
    Wait a couple of seconds until the task finishes its run.

    Returns:
        bool: True if the task ends successfully.

    Raises:
        Exception: If an error occurs during the task's runtime.
        Throttled: If the task did not finish within a predefined timespan.
    """
    for dummy in range(3):
        time.sleep(1)
        task = Task.objects.get(pk=dispatched_task.pk)
        if task.state == "completed":
            task.delete()
            return True
        elif task.state in ["waiting", "running"]:
            continue
        else:
            error = task.error
            task.delete()
            raise Exception(str(error))
    raise Throttled()
def error(self, user=None, api=None, system=None, details=None):
    """Method for notifying admins and raising a Throttled exception.

    Notifications are sent to the Production Mailing List SNS topic.

    Args:
        user (optional[str]): Name of the user, if the user is throttled
        api (optional[str]): Name of the API, if the API is throttled
        system (optional[bool]): If the system is throttled
        details (dict): Information about the API call that will be included
                        in the notification to the administrators

    Raises:
        Throttled: Exception with generic information on why the call was throttled
    """
    if user:
        ex_msg = self.user_error_detail
        sns_msg = "Throttling user '{}': {}".format(user.username, json.dumps(details))
    elif api:
        ex_msg = self.api_error_detail
        sns_msg = "Throttling API '{}': {}".format(api, json.dumps(details))
    elif system:
        ex_msg = self.system_error_detail
        sns_msg = "Throttling system: {}".format(json.dumps(details))

    client = bossutils.aws.get_session().client('sns')
    client.publish(TopicArn=self.topic,
                   Subject='Boss Request Throttled',
                   Message=sns_msg)

    raise Throttled(detail=ex_msg)
def create(self, validated_data):
    if self._get_cache() is not None:
        raise Throttled()

    callback = validated_data['callback']
    signed_data = signing.dumps(dict(callback=callback, user_id=self.user.pk))
    callback = add_params(callback, sign=signed_data)

    email_message = get_password_reset_email(self.user, callback)
    try:
        email_message.send()
    except smtplib.SMTPServerDisconnected as e:
        raise serializers.ValidationError(
            'Mail sending timeout.',
            code=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except smtplib.SMTPException as e:
        raise serializers.ValidationError(
            'Unknown SMTP error: %s.' % str(e),
            code=status.HTTP_500_INTERNAL_SERVER_ERROR)
    else:
        self._set_cache()

    return 'OK'
def has_permission(self, request: Request, view):
    has_project_perms = super().has_permission(request, view)
    if not has_project_perms:
        return False

    current_p2s: ProjectSubscription = request.current_p2s
    if not current_p2s.subscription.pep_checks:
        return False

    unix_now = int(time.time())
    unix_minute_now = unix_now // 60

    if unix_minute_now > current_p2s.pep_checks_minute:
        current_p2s.pep_checks_minute = unix_minute_now
        current_p2s.pep_checks_count_per_minute = 1
        current_p2s.save(update_fields=[
            'pep_checks_minute',
            'pep_checks_count_per_minute',
        ])
        return True
    elif current_p2s.pep_checks_count_per_minute >= current_p2s.subscription.pep_checks_per_minute:
        raise Throttled(wait=60 - (unix_now - (current_p2s.pep_checks_minute * 60)))
    else:
        current_p2s.pep_checks_count_per_minute += 1
        current_p2s.save(update_fields=['pep_checks_count_per_minute'])
        return True
def throttled(self, request, wait):
    raise Throttled(detail={
        "detail": _("You can't upload more than 3 documents per 24 hours."),
        "available_in": f"{wait} seconds",
    })
def custom_exception_handler(exc, context):
    if isinstance(exc, RateLimitExceeded):
        # If Snuba throws a RateLimitExceeded then it'll likely be available
        # after another second.
        exc = Throttled(wait=1)

    return exception_handler(exc, context)
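# A handler like the one above only takes effect once it is registered under DRF's
# ``EXCEPTION_HANDLER`` setting; the module path below is illustrative, not taken from
# the project.
REST_FRAMEWORK = {
    "EXCEPTION_HANDLER": "myproject.api.handlers.custom_exception_handler",
}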
def check_coin_gecko_rate_limit():
    cache = caches['default']
    # Check when the API was last consumed
    coin_gecko_consumption_time = cache.get_or_set('coin_gecko_consumption_time', timezone.now(), 100)
    # Check how many times the API has been consumed in the last minute
    coin_gecko_consumption_count = cache.get('coin_gecko_consumption_count', 0)
    if coin_gecko_consumption_count == 0:
        # The API has not yet been consumed, so mark this as the first consumption and proceed
        cache.set('coin_gecko_consumption_count', 1, 60)
        return True
    delta = timezone.now() - coin_gecko_consumption_time
    if delta.seconds >= 60:
        # The rate limit is 100 requests per minute, so if the minute has passed,
        # reset the rate limit and proceed
        cache.set('coin_gecko_consumption_count', 0, 60)
        cache.set('coin_gecko_consumption_time', timezone.now(), 100)
        return True
    else:
        # Still within the minute rate limiter, so check if there are still available requests
        if coin_gecko_consumption_count < 100:
            cache.set('coin_gecko_consumption_count', coin_gecko_consumption_count + 1)
            return True
        raise Throttled(detail=TOO_MANY_API_REQUESTS.format(delta_seconds=60 - delta.seconds))
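# A caller would run the check above before each outbound request so that exhausting the
# shared budget surfaces as a DRF ``Throttled`` response. A sketch only; the fetch function
# and the exact CoinGecko endpoint used here are illustrative, not taken from the project.
import requests

def fetch_coin_price(coin_id):
    check_coin_gecko_rate_limit()  # raises Throttled when the per-minute budget is used up
    url = "https://api.coingecko.com/api/v3/simple/price"
    return requests.get(url, params={"ids": coin_id, "vs_currencies": "usd"}).json()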
def perform_create(self, serializer):
    post_count = Post.objects.filter(user=self.request.user).count()
    if post_count >= env('MAX_POSTS_PER_USER'):
        msg = "You have reached the limit of posts per user."
        raise Throttled(wait=None, detail=msg, code=None)
    else:
        serializer.save(user=self.request.user)
def test_get_full_details_with_throttling(self):
    exception = Throttled()
    assert exception.get_full_details() == {
        'message': 'Request was throttled.',
        'code': 'throttled'}

    exception = Throttled(wait=2)
    assert exception.get_full_details() == {
        'message': 'Request was throttled. Expected available in {} seconds.'.format(2),
        'code': 'throttled'}

    exception = Throttled(wait=2, detail='Slow down!')
    assert exception.get_full_details() == {
        'message': 'Slow down! Expected available in {} seconds.'.format(2),
        'code': 'throttled'}
def add_like(obj, user):
    obj_type = ContentType.objects.get_for_model(obj)
    like_count = Like.objects.filter(user=user).count()
    if like_count >= env('MAX_LIKES_PER_USER'):
        msg = "You have reached the limit of likes per user."
        raise Throttled(wait=None, detail=msg, code=None)
    else:
        like, is_created = Like.objects.get_or_create(
            content_type=obj_type, object_id=obj.id, user=user)
        return like
def _check_throttle(self):
    from django.core.cache import cache
    from biohub.core.conf import settings as biohub_settings

    request = self.context['request']
    ip = get_ip_from_request(request)
    key = '{}_{}_register'.format(ip, request.session.session_key)

    if cache.get(key) is not None:
        raise Throttled()

    cache.set(key, 1, timeout=biohub_settings.THROTTLE['register'])
def create(self, validated_data):
    from django.core.cache import cache
    from biohub.core.conf import settings as biohub_settings

    key = 'user_{}_rate'.format(self.context['user'].id)

    if cache.get(key) is not None:
        raise Throttled()

    cache.set(key, 1, timeout=biohub_settings.THROTTLE['rate'])

    return self.context['brick'].rate(self.context['user'], validated_data['score'])
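# The register check and rate serializer above (and the vote method further down) share the
# same cache-key cooldown pattern: refuse while the key exists, otherwise set it with a TTL.
# A generic, self-contained sketch of that idea; the helper name is an assumption, not code
# from any of the projects shown.
from django.core.cache import cache
from rest_framework.exceptions import Throttled

def enforce_cooldown(key, seconds):
    """Raise Throttled if ``key`` was already used within the last ``seconds`` seconds."""
    if cache.get(key) is not None:
        raise Throttled()
    cache.set(key, 1, timeout=seconds)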
def allow_request(self, request, view):
    request_limit = Message.objects.filter(user=request.user)
    if self.rate is None:
        return True
    self.key = self.get_cache_key(request, view)
    if self.key is None:
        return True
    if len(self.history) >= 3:
        data = Counter(self.history)
        for key, value in data.items():
            if value == 15:
                raise Throttled(detail=(
                    "You have reached the limit of 15 open requests. "
                    "Please wait until your existing requests have been "
                    "evaluated before submitting additional disputes."))
def mount_blob(self, request, path, repository):
    """Mount a blob that is already present in another repository."""
    from_path = request.query_params["from"]
    try:
        distribution = models.ContainerDistribution.objects.get(base_path=from_path)
    except models.ContainerDistribution.DoesNotExist:
        raise RepositoryNotFound(name=path)

    try:
        version = distribution.repository_version or distribution.repository.latest_version()
    except AttributeError:
        # the distribution does not contain a reference to the source repository version
        raise RepositoryNotFound(name=from_path)

    digest = request.query_params["mount"]
    try:
        blob = models.Blob.objects.get(digest=digest, pk__in=version.content)
    except models.Blob.DoesNotExist:
        raise BlobNotFound(digest=digest)

    dispatched_task = dispatch(
        add_and_remove,
        shared_resources=[version.repository],
        exclusive_resources=[repository],
        kwargs={
            "repository_pk": str(repository.pk),
            "add_content_units": [str(blob.pk)],
            "remove_content_units": [],
        },
    )

    # Wait a small amount of time
    for dummy in range(3):
        time.sleep(1)
        task = Task.objects.get(pk=dispatched_task.pk)
        if task.state == "completed":
            task.delete()
            return BlobResponse(blob, path, 201, request)
        elif task.state in ["waiting", "running"]:
            continue
        else:
            error = task.error
            task.delete()
            raise Exception(str(error))
    raise Throttled()
def check_throttle_classes(info, field, throttle_classes):
    if throttle_classes is None:
        if hasattr(info, "context") and info.context and info.context.get("view", None):
            throttle_classes = info.context.get("view").resolver_throttle_classes
        else:
            warnings.warn(
                UserWarning("{} should not be called without context.".format(field.__name__)))
    if throttle_classes is not None:
        for throttle in [t() for t in throttle_classes]:
            if not throttle.allow_request(info.context.get("request"),
                                          info.context.get("view")):
                raise Throttled(throttle.wait())
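# The check above expects to be handed the resolver and its ``info`` object; a GraphQL
# resolver decorator could thread them through like this. A sketch only: the decorator name
# and call convention are assumptions, not code from the source project.
import functools

def throttle(throttle_classes=None):
    def decorator(resolver):
        @functools.wraps(resolver)
        def wrapped(root, info, **kwargs):
            check_throttle_classes(info, resolver, throttle_classes)
            return resolver(root, info, **kwargs)
        return wrapped
    return decorator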
def create(self, validated_data):
    from biohub.core.conf import settings as biohub_settings
    from datetime import timedelta
    from django.utils.timezone import now

    brick = validated_data.pop('brick_name')
    author = validated_data['author']

    if Experience.objects.filter(
            author=author, brick=brick,
            pub_time__gte=now() - timedelta(
                seconds=biohub_settings.THROTTLE['experience'])).exists():
        raise Throttled()

    content_serializer = validated_data.pop('content_input')
    content = content_serializer.save()

    experience = Experience.objects.create(
        brick=brick, content=content,
        author_name=author.username, **validated_data)

    return experience
def vote(self, user):
    from django.core.cache import cache
    from biohub.core.conf import settings as biohub_settings

    key = 'user_{}_vote'.format(user.id)

    if cache.get(key) is not None:
        raise Throttled()

    cache.set(key, 1, timeout=biohub_settings.THROTTLE['vote'])

    if self.author is not None and self.author.id == user.id:
        return False

    if not self.voted_users.filter(pk=user.id).exists():
        with transaction.atomic():
            self.votes += 1
            self.voted_users.add(user)
            self.save(update_fields=['votes'])

        voted_experience_signal.send(
            sender=self.__class__, instance=self,
            user_voted=user, current_votes=self.votes)
        return True

    return False
def submit_flag(self, *_args, **_kwargs):
    if getattr(self.request, 'limited', False):
        raise Throttled(detail='You can submit a flag 4 times per minute.')

    task = self.get_object()

    if self.request.user.has_perm('api.change_task', task):
        raise ValidationError({'flag': 'You cannot submit this task'})

    contest = self.get_contest()
    team = self.get_participating_team(contest)

    relation = api.models.ContestTaskRelationship.objects.get(
        contest=contest,
        task=task,
    )

    if team and relation.solved_by.filter(id=team.id).exists():
        raise PermissionDenied('Task already solved.')

    serializer = api.tasks.serializers.TaskSubmitSerializer(
        data=self.request.data,
        instance=task,
    )
    serializer.is_valid(raise_exception=True)

    if team and contest.is_running:
        relation.solved_by.add(team)
        contest.contest_participant_relationship.filter(
            participant_id=self.request.user.id,
        ).update(
            last_solve=timezone.now(),
        )

    task.solved_by.add(self.request.user)
    self.request.user.last_solve = timezone.now()
    self.request.user.save()

    return Response('accepted!')
def throttled(self, request, wait):
    raise Throttled(detail={"message": "rate limit exceeded"})
def put(self, request, path, pk=None):
    """
    Responds with the actual manifest
    """
    _, repository = self.get_dr_push(request, path)

    # iterate over all the layers and create
    chunk = request.META["wsgi.input"]
    artifact = self.receive_artifact(chunk)
    with storage.open(artifact.file.name) as artifact_file:
        raw_data = artifact_file.read()
    content_data = json.loads(raw_data)

    config_layer = content_data.get("config")
    config_blob = models.Blob.objects.get(digest=config_layer.get("digest"))

    manifest = models.Manifest(
        digest="sha256:{id}".format(id=artifact.sha256),
        schema_version=2,
        media_type=request.content_type,
        config_blob=config_blob,
    )
    try:
        manifest.save()
    except IntegrityError:
        manifest = models.Manifest.objects.get(digest=manifest.digest)

    ca = ContentArtifact(artifact=artifact, content=manifest, relative_path=manifest.digest)
    try:
        ca.save()
    except IntegrityError:
        pass

    layers = content_data.get("layers")
    blobs = []
    for layer in layers:
        blobs.append(layer.get("digest"))
    blobs_qs = models.Blob.objects.filter(digest__in=blobs)
    thru = []
    for blob in blobs_qs:
        thru.append(models.BlobManifest(manifest=manifest, manifest_blob=blob))
    models.BlobManifest.objects.bulk_create(objs=thru, ignore_conflicts=True, batch_size=1000)

    tag = models.Tag(name=pk, tagged_manifest=manifest)
    try:
        tag.save()
    except IntegrityError:
        tag = models.Tag.objects.get(name=tag.name, tagged_manifest=manifest)

    tags_to_remove = models.Tag.objects.filter(
        pk__in=repository.latest_version().content.all(), name=tag
    ).exclude(tagged_manifest=manifest)

    dispatched_task = dispatch(
        add_and_remove,
        [repository],
        kwargs={
            "repository_pk": str(repository.pk),
            "add_content_units": [str(tag.pk), str(manifest.pk)],
            "remove_content_units": [str(pk) for pk in tags_to_remove.values_list("pk")],
        },
    )

    # Wait a small amount of time
    for dummy in range(3):
        time.sleep(1)
        task = Task.objects.get(pk=dispatched_task.pk)
        if task.state == "completed":
            task.delete()
            return ManifestResponse(manifest, path, request, status=201)
        elif task.state in ["waiting", "running"]:
            continue
        else:
            error = task.error
            task.delete()
            raise Exception(str(error))
    raise Throttled()
def put(self, request, path, pk=None):
    """
    Create a blob from uploaded chunks.
    """
    _, repository = self.get_dr_push(request, path)
    digest = request.query_params["digest"]

    # Try to see if the client came back after we told it to back off with the ``Throttled``
    # exception. In that case we answer based on the task state, or make it back off again.
    # This mechanism seems to work with podman but not with docker. However we let the task run
    # anyway, since all clients will look with a HEAD request before attempting to upload a blob
    # again.
    try:
        upload = models.Upload.objects.get(pk=pk, repository=repository)
    except models.Upload.DoesNotExist as e_upload:
        # Upload has been deleted => task has started or even finished
        try:
            task = Task.objects.filter(
                name__endswith="add_and_remove",
                reserved_resources_record__resource=f"upload:{pk}",
            ).last()
        except Task.DoesNotExist:
            # No upload and no task for it => the upload probably never existed
            # return 404
            raise e_upload

        if task.state == "completed":
            task.delete()
            blob = models.Blob.objects.get(digest=digest)
            return BlobResponse(blob, path, 201, request)
        elif task.state in ["waiting", "running"]:
            raise Throttled()
        else:
            error = task.error
            task.delete()
            raise Exception(str(error))

    chunks = UploadChunk.objects.filter(upload=upload).order_by("offset")

    with NamedTemporaryFile("ab") as temp_file:
        for chunk in chunks:
            temp_file.write(chunk.file.read())
        temp_file.flush()

        uploaded_file = PulpTemporaryUploadedFile.from_file(File(open(temp_file.name, "rb")))

    if uploaded_file.hashers["sha256"].hexdigest() == digest[len("sha256:"):]:
        try:
            artifact = Artifact.init_and_validate(uploaded_file)
            artifact.save()
        except IntegrityError:
            artifact = Artifact.objects.get(sha256=artifact.sha256)

        try:
            blob = models.Blob(digest=digest, media_type=models.MEDIA_TYPE.REGULAR_BLOB)
            blob.save()
        except IntegrityError:
            blob = models.Blob.objects.get(digest=digest)

        try:
            blob_artifact = ContentArtifact(
                artifact=artifact, content=blob, relative_path=digest
            )
            blob_artifact.save()
        except IntegrityError:
            pass

        upload.delete()

        dispatched_task = dispatch(
            add_and_remove,
            [f"upload:{pk}", repository],
            kwargs={
                "repository_pk": str(repository.pk),
                "add_content_units": [str(blob.pk)],
                "remove_content_units": [],
            },
        )

        # Wait a small amount of time
        for dummy in range(3):
            time.sleep(1)
            task = Task.objects.get(pk=dispatched_task.pk)
            if task.state == "completed":
                task.delete()
                return BlobResponse(blob, path, 201, request)
            elif task.state in ["waiting", "running"]:
                continue
            else:
                error = task.error
                task.delete()
                raise Exception(str(error))
        raise Throttled()
    else:
        raise Exception("The digest did not match")
def submit_flag(self, *_args, **_kwargs):
    if getattr(self.request, 'limited', False):
        raise Throttled(detail='You can submit a flag 4 times per minute.')

    task = self.get_object()

    if self.request.user.has_perm('api.change_task', task):
        raise ValidationError({'flag': 'You cannot submit this task'})

    contest = self.get_contest()
    cpr = contest.get_cpr(self.request.user)

    # One cannot submit the task during the contest without a registered team,
    # to prevent abuse.
    if not cpr and contest.is_running:
        raise ValidationError({'flag': 'You are not registered for a contest'})

    relation = api.models.ContestTaskRelationship.objects.get(
        contest=contest,
        task=task,
    )

    if cpr and relation.solved_by.filter(id=cpr.participant_id).exists():
        raise PermissionDenied('Task already solved.')

    # noinspection PyUnresolvedReferences
    serializer = api.tasks.serializers.TaskSubmitSerializer(
        data=self.request.data,
        instance=task,
    )

    submission = api.models.Submission(
        user=self.request.user,
        participant_id=cpr.participant_id,
        contest=contest,
        task=task,
        flag=serializer.initial_data.get('flag'),
    )

    try:
        serializer.is_valid(raise_exception=True)
        submission.success = True
    except ValidationError:
        submission.success = False
        raise
    finally:
        submission.save()

    solved_at = timezone.now()
    user = self.request.user

    upsolving = True
    # If the user is registered and (if the contest is virtual) is participating virtually.
    if cpr and contest.is_running and (
            not contest.is_virtual or contest.is_virtually_running_for(user)):
        upsolving = False
        relation.solved_by.add(cpr.participant_id)
        contest.contest_participant_relationship.filter(
            id=cpr.id,
            last_solve__lt=solved_at,
        ).update(last_solve=solved_at)

    task.solved_by.add(user)

    # fixme: solved_at is updated even when task is already solved on upsolving
    self.request.user.last_solve = solved_at
    self.request.user.save(update_fields=['last_solve'])

    return Response(dict(success=True, upsolving=upsolving))
def create_message(self, request, *args, **kwargs):
    """
    Create a message attached to a service.

    The service must either be identified in the URL pattern, or as a query
    parameter, or in the request data, all as the variable ``service_name``.
    The precedence is in that same order, so for example, if the service is
    defined in the URL parameter, then the service in the request data will
    be ignored.
    """
    # extract service name
    if "service_name" in self.kwargs:
        service_name = self.kwargs.get("service_name")
    elif "service_name" in request.query_params:
        service_name = request.query_params.get("service_name")
    elif "service_name" in request.data:
        service_name = request.data.get("service_name")
    else:
        raise NotFound(detail="Target service name not provided.")

    # resolve to service, or raise error
    try:
        service = Service.objects.get(name=service_name)
    except (Service.DoesNotExist, ValueError, TypeError):
        raise NotFound(detail="Target service not found.")

    # check for service-level permissions
    allowed_groups = service.allowed_groups.values_list("pk", flat=True)
    if not request.user.groups.filter(pk__in=allowed_groups):
        raise PermissionDenied()

    # extract FROM email
    from_email = request.data.get("from", None)
    if from_email:
        from_email, _ = EmailAddress.get_or_create(from_email)

    # check body
    body_raw = request.data.get("body", "") or ""
    if isinstance(body_raw, dict):
        if service.json_body_policy == service.FORBID:
            raise ValidationError(detail="Body cannot be a JSON object.")
        body = json.dumps(body_raw)
    elif isinstance(body_raw, str):
        body = body_raw
    else:
        raise ValidationError(detail="Body has invalid type {}".format(type(body_raw)))

    # build message
    message = Message(
        service=service,
        override_from_email_address=from_email,
        subject=request.data.get("subject", "") or "",
        body=body,
    )

    # associate message with user
    message.user_type = ContentType.objects.get_for_model(request.user)
    message.user_id = request.user.pk

    # try saving, handling RateLimitException
    try:
        message.save()
    except RateLimitException:
        raise Throttled(detail="Rate limit has been reached!")
    except JSONBodyRequired:
        raise ValidationError(detail="Body must be a JSON object.")

    # add emails to the message
    message.extra_to_email_addresses.add(
        *EmailAddress.convert_emails(request.data.get("to", [])))
    message.extra_cc_email_addresses.add(
        *EmailAddress.convert_emails(request.data.get("cc", [])))
    message.extra_bcc_email_addresses.add(
        *EmailAddress.convert_emails(request.data.get("bcc", [])))

    # signal message can be sent
    message.ready_to_send = True
    message.save()

    return Response({}, status=status.HTTP_201_CREATED)
def throttled(self, request, wait):
    """
    If request is throttled, determine what kind of exception to raise.
    """
    raise Throttled(wait)
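# DRF calls ``self.throttled(request, wait)`` on the view whenever one of its throttle
# classes denies a request, which is the hook the overrides above rely on. A minimal,
# self-contained sketch of wiring it up; the view class and message are illustrative.
from rest_framework.exceptions import Throttled
from rest_framework.throttling import UserRateThrottle
from rest_framework.views import APIView

class UploadView(APIView):
    throttle_classes = [UserRateThrottle]

    def throttled(self, request, wait):
        raise Throttled(detail={"message": "rate limit exceeded", "available_in": wait})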