def get(self, request, wizard_hash=None): """ This tries to retrieve and return the cache content if possible otherwise creates new cache """ if wizard_hash is not None: key = "%s%s" % (SETUP_WIZARD_CACHE_KEY, wizard_hash) wizard_data = default_cache.get(key) if wizard_data is None: return Response(status=404) elif wizard_data == "empty": # when we just created a clean cache return Response(status=400) return Response(serialize(wizard_data)) else: # This creates a new available hash url for the project wizard rate_limited = ratelimits.is_limited(key="rl:setup-wizard:ip:%s" % request.META["REMOTE_ADDR"], limit=10) if rate_limited: logger.info("setup-wizard.rate-limit") return Response({"Too many wizard requests"}, status=403) wizard_hash = get_random_string( 64, allowed_chars="abcdefghijklmnopqrstuvwxyz012345679") key = "%s%s" % (SETUP_WIZARD_CACHE_KEY, wizard_hash) default_cache.set(key, "empty", SETUP_WIZARD_CACHE_TIMEOUT) return Response(serialize({"hash": wizard_hash}))
def download(self, debug_file_id, project):
    """Stream the debug file ``debug_file_id`` back as a binary download.

    Enforces a per-file/per-project rate limit (403 when exceeded) and
    raises ``Http404`` when the file does not exist or cannot be read.
    """
    limit_key = "rl:DSymFilesEndpoint:download:%s:%s" % (debug_file_id, project.id)
    if ratelimits.is_limited(project=project, key=limit_key, limit=10):
        logger.info(
            "notification.rate_limited",
            extra={
                "project_id": project.id,
                "project_debug_file_id": debug_file_id,
            },
        )
        return HttpResponse({"Too many download requests"}, status=403)

    debug_file = ProjectDebugFile.objects.filter(id=debug_file_id).first()
    if debug_file is None:
        raise Http404

    try:
        fp = debug_file.file.getfile()
        response = StreamingHttpResponse(
            # Stream in 4 KiB chunks until the file yields an empty read.
            iter(lambda: fp.read(4096), b""),
            content_type="application/octet-stream",
        )
        response["Content-Length"] = debug_file.file.size
        response["Content-Disposition"] = 'attachment; filename="%s%s"' % (
            posixpath.basename(debug_file.debug_id),
            debug_file.file_extension,
        )
        return response
    except IOError:
        # Treat unreadable blobs the same as missing ones.
        raise Http404
def get(self, request, wizard_hash=None): """ This tries to retrieve and return the cache content if possible otherwise creates new cache """ if wizard_hash is not None: key = '%s%s' % (SETUP_WIZARD_CACHE_KEY, wizard_hash) wizard_data = default_cache.get(key) if wizard_data is None: return Response(status=404) elif wizard_data == 'empty': # when we just created a clean cache return Response(status=400) return Response(serialize(wizard_data)) else: # This creates a new available hash url for the project wizard rate_limited = ratelimits.is_limited( key='rl:setup-wizard:ip:%s' % request.META['REMOTE_ADDR'], limit=10, ) if rate_limited: logger.info('setup-wizard.rate-limit') return Response( { 'Too wizard requests', }, status=403 ) wizard_hash = get_random_string( 64, allowed_chars='abcdefghijklmnopqrstuvwxyz012345679') key = '%s%s' % (SETUP_WIZARD_CACHE_KEY, wizard_hash) default_cache.set(key, 'empty', SETUP_WIZARD_CACHE_TIMEOUT) return Response(serialize({'hash': wizard_hash}))
def post_process(self, event, **kwargs):
    """Forward ``event`` to the configured destination unless rate limited.

    Skips (and logs) when the per-organization rate limit is exceeded;
    otherwise builds the payload, forwards it, and bumps the forwarded
    counter in tsdb.
    """
    rl_key = u"{}:{}".format(self.conf_key, event.project.organization_id)
    # limit segment to 50 requests/second
    limit, window = self.get_rate_limit()
    if limit and window:
        if ratelimits.is_limited(rl_key, limit=limit, window=window):
            logger.info(
                "data_forwarding.skip_rate_limited",
                extra={
                    "event_id": event.event_id,
                    "issue_id": event.group_id,
                    "project_id": event.project_id,
                    "organization_id": event.project.organization_id,
                },
            )
            return

    payload = self.get_event_payload(event)
    if self.forward_event(event, payload) is False:
        # TODO(dcramer): record failure
        pass
    tsdb.incr(tsdb.models.project_total_forwarded, event.project.id, count=1)
def download(self, debug_file_id, project):
    """Stream the debug file ``debug_file_id`` back as a binary download.

    Enforces a per-file/per-project rate limit (403 when exceeded) and
    raises ``Http404`` when the file does not exist or cannot be read.
    """
    limit_key = 'rl:DSymFilesEndpoint:download:%s:%s' % (debug_file_id, project.id)
    if ratelimits.is_limited(project=project, key=limit_key, limit=10):
        logger.info(
            'notification.rate_limited',
            extra={
                'project_id': project.id,
                'project_debug_file_id': debug_file_id,
            },
        )
        return HttpResponse(
            {
                'Too many download requests',
            },
            status=403
        )

    debug_file = ProjectDebugFile.objects.filter(id=debug_file_id).first()
    if debug_file is None:
        raise Http404

    try:
        fp = debug_file.file.getfile()
        response = StreamingHttpResponse(
            # Stream in 4 KiB chunks until the file yields an empty read.
            iter(lambda: fp.read(4096), b''),
            content_type='application/octet-stream',
        )
        response['Content-Length'] = debug_file.file.size
        response['Content-Disposition'] = 'attachment; filename="%s%s"' % (
            posixpath.basename(debug_file.debug_id),
            debug_file.file_extension,
        )
        return response
    except IOError:
        # Treat unreadable blobs the same as missing ones.
        raise Http404
def should_notify(self, group, event):
    """Return True unless this plugin is rate limited for the project.

    The limit is 10 notifications per plugin config key per project; a
    skipped notification is logged with the project id.
    """
    project = group.project
    limited = ratelimits.is_limited(project=project, key=self.get_conf_key(), limit=10)
    if limited:
        self.logger.info("notification.rate_limited", extra={"project_id": project.id})
        return False
    return True
def should_notify(self, group, event):
    """Return True unless the group is ignored or the project is rate limited.

    Ignored groups never notify; otherwise the limit is 10 notifications
    per plugin config key per project, and a skipped notification is
    logged with the project id.
    """
    if group.is_ignored():
        return False

    project = group.project
    limited = ratelimits.is_limited(
        project=project,
        key=self.get_conf_key(),
        limit=10,
    )
    if limited:
        self.logger.info('notification.rate_limited', extra={'project_id': project.id})
        return False
    return True
def post_process(self, event, **kwargs):
    """Forward ``event`` to the configured destination unless rate limited.

    Silently skips when the per-organization rate limit is exceeded;
    otherwise builds the payload, forwards it, and bumps the forwarded
    counter in tsdb.
    """
    rl_key = '{}:{}'.format(self.conf_key, event.project.organization_id)
    # limit segment to 50 requests/second
    limit, window = self.get_rate_limit()
    if limit and window:
        if ratelimits.is_limited(rl_key, limit=limit, window=window):
            return

    payload = self.get_event_payload(event)
    if self.forward_event(event, payload) is False:
        # TODO(dcramer): record failure
        pass
    tsdb.incr(tsdb.models.project_total_forwarded, event.project.id, count=1)
def is_ratelimited(self, event):
    """Return True (and log) when forwarding for ``event`` is rate limited."""
    self.initialize_variables(event)
    rl_key = self.get_rl_key(event)
    # limit segment to 50 requests/second
    limit, window = self.get_rate_limit()
    if not (limit and window):
        return False
    if not ratelimits.is_limited(rl_key, limit=limit, window=window):
        return False
    logger.info(
        "data_forwarding.skip_rate_limited",
        extra={
            "event_id": event.event_id,
            "issue_id": event.group_id,
            "project_id": event.project_id,
            "organization_id": event.project.organization_id,
        },
    )
    return True
def download(self, project_dsym_id, project):
    """Stream the dSYM file ``project_dsym_id`` back as a binary download.

    Enforces a per-file/per-project rate limit (403 when exceeded) and
    raises ``Http404`` when the file does not exist or cannot be read.
    Proguard mapping files are named ``.txt``; everything else ``.dSYM``.
    """
    limit_key = 'rl:DSymFilesEndpoint:download:%s:%s' % (project_dsym_id, project.id)
    if ratelimits.is_limited(project=project, key=limit_key, limit=10):
        logger.info(
            'notification.rate_limited',
            extra={'project_id': project.id, 'project_dsym_id': project_dsym_id},
        )
        return HttpResponse(
            {
                'Too many download requests',
            },
            status=403
        )

    debug_file = ProjectDSymFile.objects.filter(id=project_dsym_id).first()
    if debug_file is None:
        raise Http404

    if debug_file.dsym_type == 'proguard' and debug_file.object_name == 'proguard-mapping':
        suffix = ".txt"
    else:
        suffix = ".dSYM"

    try:
        fp = debug_file.file.getfile()
        response = StreamingHttpResponse(
            # Stream in 4 KiB chunks until the file yields an empty read.
            iter(lambda: fp.read(4096), b''),
            content_type='application/octet-stream',
        )
        response['Content-Length'] = debug_file.file.size
        response['Content-Disposition'] = 'attachment; filename="%s%s"' % (
            posixpath.basename(debug_file.debug_id),
            suffix,
        )
        return response
    except IOError:
        # Treat unreadable blobs the same as missing ones.
        raise Http404
def download(self, project_dsym_id, project):
    """Stream the dSYM file ``project_dsym_id`` back as a binary download.

    Enforces a per-file/per-project rate limit (403 when exceeded) and
    raises ``Http404`` when the file does not exist or cannot be read.
    Proguard mapping files are named ``.txt``; everything else ``.dSYM``.
    """
    limit_key = 'rl:DSymFilesEndpoint:download:%s:%s' % (project_dsym_id, project.id)
    if ratelimits.is_limited(project=project, key=limit_key, limit=10):
        logger.info(
            'notification.rate_limited',
            extra={'project_id': project.id, 'project_dsym_id': project_dsym_id},
        )
        return HttpResponse(
            {
                'Too many download requests',
            },
            status=403
        )

    dsym = ProjectDSymFile.objects.filter(id=project_dsym_id).first()
    if dsym is None:
        raise Http404

    if dsym.dsym_type == 'proguard' and dsym.object_name == 'proguard-mapping':
        suffix = ".txt"
    else:
        suffix = ".dSYM"

    try:
        fp = dsym.file.getfile()
        response = StreamingHttpResponse(
            # Stream in 4 KiB chunks until the file yields an empty read.
            iter(lambda: fp.read(4096), b''),
            content_type='application/octet-stream',
        )
        response['Content-Length'] = dsym.file.size
        response['Content-Disposition'] = 'attachment; filename="%s%s"' % (
            posixpath.basename(dsym.uuid),
            suffix,
        )
        return response
    except IOError:
        # Treat unreadable blobs the same as missing ones.
        raise Http404
def __is_rate_limited(self, group, event):
    """True when notifications for the group's project are rate limited."""
    limited = ratelimits.is_limited(
        project=group.project, key=self.get_conf_key(), limit=10
    )
    return limited
def __is_rate_limited(self, group, event):
    """True when notifications for the group's project are rate limited."""
    return ratelimits.is_limited(project=group.project, key=self.get_conf_key(), limit=10)