def _get_attachment_metrics(part):
    """Record metrics for an email attachment part.

    Increments the attachment counter, logs the attachment details, and
    feeds the payload size into a histogram tagged by extension and
    content type.

    Returns a (content_type, extension, payload_size) tuple.
    """
    incr_if_enabled('email_with_attachment', 1)
    filename = part.get_filename()
    content_type = part.get_content_type()
    size = len(part.get_payload(decode=True))
    # Prefer the extension from the declared filename; otherwise guess
    # one from the content type.
    if filename:
        extension = os.path.splitext(filename)[1]
    else:
        extension = mimetypes.guess_extension(content_type)
    logger.error(
        'Attachment found in email',
        extra={
            'content-type': content_type,
            'extension': extension,
            'payload-size': size
        }
    )
    tags = [
        generate_tag('attachment', extension),
        generate_tag('attachment', content_type),
    ]
    histogram_if_enabled('attachment.size', size, tags)
    return content_type, extension, size
def _get_attachment(part):
    """Extract an attachment from a message part into a spooled temp file.

    Records attachment metrics (counter plus a size histogram tagged by
    extension and content type), then writes the decoded payload into a
    SpooledTemporaryFile (held in memory up to 150KB, then on disk).

    Returns a (filename, file_object) tuple; ``filename`` may be None
    when the part carries no filename header.
    """
    incr_if_enabled('email_with_attachment', 1)
    fn = part.get_filename()
    ct = part.get_content_type()
    payload = part.get_payload(decode=True)
    payload_size = len(payload)
    if fn:
        extension = os.path.splitext(fn)[1]
        prefix = os.path.splitext(fn)[0]
    else:
        # No filename on the part: guess the extension from the content
        # type. BUG FIX: the original unconditionally computed the temp
        # file prefix as os.path.splitext(fn)[0], which raises TypeError
        # when fn is None; fall back to a generic prefix instead.
        extension = mimetypes.guess_extension(ct)
        prefix = 'attachment'
    tag_type = 'attachment'
    attachment_extension_tag = generate_tag(tag_type, extension)
    attachment_content_type_tag = generate_tag(tag_type, ct)
    histogram_if_enabled(
        'attachment.size',
        payload_size,
        [attachment_extension_tag, attachment_content_type_tag]
    )
    attachment = SpooledTemporaryFile(
        max_size=150 * 1000,  # 150KB max from SES
        suffix=extension,
        prefix=prefix
    )
    attachment.write(payload)
    return fn, attachment
def test_generate_tag_bad_data():
    """generate_tag rejects a non-string key and a non-string value."""
    with pytest.raises(ValueError) as exc_info:
        generate_tag(42)
    expected = 'key must be a string type, but got 42 instead'
    assert str(exc_info.value) == expected

    with pytest.raises(ValueError) as exc_info:
        generate_tag('key', 42)
    expected = 'value must be None or a string type, but got 42 instead'
    assert str(exc_info.value) == expected
def web_upload(request):
    """Render the symbols upload form (GET) or process an upload (POST).

    On a valid POST the archive is previewed, validated, recorded as a
    SymbolsUpload, unpacked into the user's S3 bucket, and the user is
    redirected home with a success message. Invalid archives return a
    400 response; an invalid form re-renders with errors.
    """
    context = {}
    if request.method != 'POST':
        # Plain GET: show an empty form.
        context['form'] = forms.UploadForm()
        return render(request, 'symbols/web_upload.html', context)

    form = forms.UploadForm(request.POST, request.FILES)
    if form.is_valid():
        uploaded = form.cleaned_data['file']
        try:
            content = utils.preview_archive_content(
                uploaded.file, uploaded.name
            )
        except BadZipfile as exception:
            return http.HttpResponseBadRequest(exception)
        error = check_symbols_archive_content(content)
        if error:
            return http.HttpResponseBadRequest(error)
        symbols_upload = models.SymbolsUpload.objects.create(
            user=request.user,
            content='',
            size=uploaded.size,
            filename=os.path.basename(uploaded.name),
        )
        # The preview read the stream; rewind before uploading.
        uploaded.file.seek(0)
        bucket_name, bucket_location = get_bucket_name_and_location(
            request.user
        )
        unpack_and_upload(
            utils.get_archive_members(uploaded.file, uploaded.name),
            symbols_upload,
            bucket_name,
            bucket_location
        )
        messages.success(
            request,
            '%s bytes of %s uploaded.' % (
                symbols_upload.size, symbols_upload.filename
            )
        )
        metrics.incr(
            'web_upload',
            tags=[generate_tag('email', request.user.email)]
        )
        return redirect('symbols:home')

    # Invalid POST: re-render with the bound form so errors display.
    context['form'] = form
    return render(request, 'symbols/web_upload.html', context)
def record_response(status_code):
    """Time request, (maybe) emit metrics, and (maybe) log this request.

    For static assets, metrics are skipped, and logs are skipped unless
    we're in the development environment.
    """
    duration = time.time() - start

    if not is_static_content:
        # Emit a request.timing and a request metric
        duration_ms = round(duration * 1000)
        # Convert a URI to to a statsd acceptable metric
        stats_path = (
            request.path.replace("/", ".").lstrip(".").replace("@", "-")
        )
        # Use generate_tag to lowercase, truncate to 200 characters
        statsd_tags = [
            # Homepage is ".homepage", would otherwise be empty string / True
            generate_tag("path", stats_path or ".homepage"),
            generate_tag("method", request.method),  # GET -> get, POST -> post
        ]
        METRICS.timing("request.timing", duration_ms, tags=statsd_tags)
        status_tag = generate_tag("status", str(status_code))
        METRICS.incr("request", tags=statsd_tags + [status_tag])

    if local_dev_env or not is_static_content:
        # Emit a canonical-log-line
        logger = structlog.get_logger("canonical-log-line")
        logger.info(
            f"{request.method} {request.path} - {status_code}",
            http_status=status_code,
            duration_s=round(duration, 3),
        )
def _ensure_hg_repository_sync(repo, do_update=False):
    """Sync the local hg clone of ``repo``, recovering from a broken clone.

    First attempts a plain pull/update of the existing clone. If that
    fails for any reason, the local clone is wiped and rebuilt from
    scratch, and the fresh clone then pulls from all related fork
    repositories (sibling forks of the same forest, or forks of the
    same repo) so no changesets are lost.

    Returns the hg repository object.
    """
    tags = [generate_tag('repo', repo.name)]
    if repo.forest:
        tags.append(generate_tag('forest', repo.forest.name))
    repopath = repo.local_path()
    try:
        with metrics.timer('hg-pull', tags=tags):
            return _hg_repository_sync(repopath, repo.url, do_update=do_update)
    except Exception as e:
        logging.error('Clone/update failed, {}'.format(e))
    # something went wrong, let's just try again
    # nuke what we had
    if os.path.exists(repopath):
        shutil.rmtree(repopath, ignore_errors=True)
        logging.error('Removed {}'.format(repopath))
    # now we need to create a clone and then pull all other origins
    other_repos = repo.forks.all()
    if repo.forest and repo.forest.fork_of:
        forests = [repo.forest.fork_of]
        # BUG FIX: the original called .exclude(repo.forest), passing a
        # model instance positionally — Django's exclude() only accepts
        # Q objects or keyword lookups, so that raised TypeError at
        # runtime. Exclude the current forest by id instead (matching
        # the .exclude(id=repo.id) pattern in the branch below).
        forests.extend(
            repo.forest.fork_of.forks.exclude(archived=True).exclude(
                id=repo.forest.id))
        other_repos = (Repository.objects.filter(forest__in=forests).filter(
            locale=repo.locale).exclude(archived=True))
    elif repo.fork_of:
        other_repos = [repo.fork_of]
        other_repos.extend(
            repo.fork_of.forks.exclude(archived=True).exclude(id=repo.id))
    tags.append(generate_tag('clone_type', 'full-clone'))
    logging.info('Cloning from {}'.format(str(repo.url)))
    with metrics.timer('hg-pull', tags=tags):
        hgrepo = _hg_repository_sync(repopath, repo.url, do_update=do_update)
    for other in other_repos:
        # Re-point the 'repo' tag at the repository we are pulling from.
        tags[0] = generate_tag('repo', other.name)
        logging.info('Pulling from {}'.format(str(other.url)))
        with metrics.timer('hg-pull', tags=tags):
            hgrepo.pull(source=str(other.url))
    return hgrepo
def upload(request):
    """API endpoint: accept a symbols archive upload and store it.

    Expects multipart form data with at least one file entry; only the
    first file is used. Validates the archive, records a SymbolsUpload,
    unpacks it into the user's S3 bucket, and returns 201 on success.
    """
    name = next(iter(request.FILES), None)
    if name is None:
        # No file entries at all in the multipart body.
        return http.HttpResponseBadRequest(
            "Must be multipart form data with key 'file'"
        )
    upload = request.FILES[name]
    size = upload.size
    if not size:
        return http.HttpResponseBadRequest('File size 0')
    content = utils.preview_archive_content(upload, name)
    error = check_symbols_archive_content(content)
    if error:
        return http.HttpResponseBadRequest(error)
    symbols_upload = models.SymbolsUpload.objects.create(
        user=request.user,
        size=size,
        content='',
        filename=name,
    )
    bucket_name, bucket_location = get_bucket_name_and_location(
        request.user
    )
    unpack_and_upload(
        utils.get_archive_members(upload, name),
        symbols_upload,
        bucket_name,
        bucket_location
    )
    metrics.incr(
        'api_upload',
        tags=[generate_tag('email', request.user.email)]
    )
    return http.HttpResponse('OK', status=201)
def get_tags():
    """Return host/process/thread identification tags for metrics."""
    host = socket.gethostname()
    pid = str(os.getpid())
    thread_name = threading.current_thread().name
    return [
        generate_tag("hostname", host),
        generate_tag("pid", pid),
        generate_tag("thread_id", thread_name),
    ]
def test_generate_tag(key, value, expected):
    """Parametrized check that generate_tag formats key/value as expected."""
    result = generate_tag(key, value)
    assert result == expected