def obj_create(self, bundle, request, **kwargs):
    """Handle an incoming Bango payment notification.

    Validates the posted data with NotificationForm and updates the
    referenced transaction's status/amount/currency accordingly.
    Raises the form errors (via self.form_errors) when invalid.
    Returns the (unchanged) bundle on success.
    """
    form = NotificationForm(bundle.data)
    bill_conf_id = form.data.get('billing_config_id')
    # Log the raw incoming values before validation so failed
    # notifications can still be diagnosed.
    log.info('Received notification for billing_config_id %r: '
             'bango_response_code: %r; bango_response_message: %r; '
             'bango_trans_id: %r'
             % (bill_conf_id,
                form.data.get('bango_response_code'),
                form.data.get('bango_response_message'),
                form.data.get('bango_trans_id')))
    if not form.is_valid():
        log.info('Notification invalid: %s' % bill_conf_id)
        raise self.form_errors(form)
    trans = form.cleaned_data['moz_transaction']
    # Map Bango response codes to (human label, internal status);
    # anything unrecognised is treated as a failure.
    states = {OK: ['completed', STATUS_COMPLETED],
              CANCEL: ['cancelled', STATUS_CANCELLED]}
    message, state = states.get(form.cleaned_data['bango_response_code'],
                                ['failed', STATUS_FAILED])
    log.info('Transaction %s: %s' % (message, trans.uuid))
    statsd.incr('bango.notification.%s' % message)
    trans.status = state
    # This is the id for the actual transaction, useful for refunds.
    trans.uid_support = form.cleaned_data['bango_trans_id']
    # The price/currency may be empty for error notifications.
    trans.amount = form.cleaned_data['amount']
    trans.currency = form.cleaned_data['currency']
    trans.save()
    return bundle
def process_request(self, request):
    """Middleware: resolve the region for this request and persist it.

    Non-API requests always get RESTOFWORLD. API requests take the
    'region' request parameter first; API v1 requests fall back to
    GeoIP. The resolved region is stored on the request / thread-local
    and, when it changed, on the authenticated user.
    """
    regions = mkt.regions.REGION_LOOKUP
    user_region = restofworld = mkt.regions.RESTOFWORLD
    if not getattr(request, 'API', False):
        # Website (non-API) requests don't do region detection at all.
        request.REGION = restofworld
        mkt.regions.set_region(restofworld)
        return
    # Try 'region' in POST/GET data first, if it's not there try geoip.
    url_region = request.REQUEST.get('region')
    if url_region in regions:
        statsd.incr('z.regions.middleware.source.url')
        user_region = regions[url_region]
        log.info('Region {0} specified in URL; region set as {1}'
                 .format(url_region, user_region.slug))
    elif getattr(request, 'API_VERSION', None) == 1:
        # Fallback to GeoIP, but only for API version 1.
        statsd.incr('z.regions.middleware.source.geoip')
        user_region = self.region_from_request(request)
        log.info('Region not specified in URL; region set as {0}'
                 .format(user_region.slug))
    # Update the region on the user object if it changed.
    if (request.user.is_authenticated() and
            request.user.region != user_region.slug):
        request.user.region = user_region.slug
        request.user.save()
    # Persist the region on the request / local thread.
    self.store_region(request, user_region)
def preview_async(request):
    """Ajax preview of posts."""
    statsd.incr('forums.preview')
    content = request.POST.get('content', '')
    preview = OutboxMessage(sender=request.user, message=content)
    context = {'message': preview}
    return render(request, 'messages/includes/message_preview.html', context)
def make_images(operation, params):
    """Extract still frames from an uploaded video and save them as Images.

    Frames are extracted into a per-operation temp dir (2 frames per MB
    of source file, capped at settings.MAX_FRAMES when moved), recorded
    as Image rows, and a random one becomes the Poster if the video has
    none. Returns an operation status tuple.
    """
    statsd.incr("make_images")
    ouuid = operation.uuid
    tmpfilename = params['tmpfilename']
    tmpdir = settings.TMP_DIR + "/imgs/" + str(ouuid) + "/"
    # honey_badger swallows errors (e.g. directory already exists).
    honey_badger(os.makedirs, tmpdir)
    size = os.stat(tmpfilename)[6] / (1024 * 1024)  # index 6 == st_size; MB
    frames = size * 2  # 2 frames per MB at the most
    command = image_extract_command_for_file(tmpdir, frames, tmpfilename)
    os.system(command)
    imgs = os.listdir(tmpdir)
    if len(imgs) == 0:
        # Primary extractor produced nothing; retry with the fallback.
        command = fallback_image_extract_command(tmpdir, frames, tmpfilename)
        os.system(command)
    # TODO: parameterize
    imgdir = "/var/www/wardenclyffe/uploads/images/%05d/" % operation.video.id
    honey_badger(os.makedirs, imgdir)
    imgs = os.listdir(tmpdir)
    imgs.sort()
    for img in imgs[:settings.MAX_FRAMES]:
        # NOTE(review): shell-built mv; tmpdir/img come from our own
        # extraction step, but this would break on odd filenames.
        os.system("mv %s%s %s" % (tmpdir, img, imgdir))
        Image.objects.create(
            video=operation.video,
            image="images/%05d/%s" % (operation.video.id, img))
        statsd.incr("image_created")
    shutil.rmtree(tmpdir)
    if Poster.objects.filter(video=operation.video).count() == 0\
            and len(imgs) > 0:
        # pick a random image out of the set and assign
        # it as the poster on the video
        r = random.randint(0, min(len(imgs), 50) - 1)
        image = Image.objects.filter(video=operation.video)[r]
        Poster.objects.create(video=operation.video, image=image)
    return ("complete", "created %d images" % len(imgs))
def register_user(sender, request, identity):
    """Create a UserProfile from a Firefox Accounts identity and log it in."""
    fxa_uid = identity['uid']
    user = UserProfile.objects.create_user(
        email=identity['email'], username=None, fxa_id=fxa_uid)
    log.info('Created user {} from FxA'.format(user))
    statsd.incr('accounts.account_created_from_fxa')
    login_user(sender, request, user, identity)
    return user
def model_save(sender, **kwargs):
    """Signal handler: bump a per-model statsd counter on every save."""
    instance = kwargs.get('instance')
    meta = instance._meta
    action = 'create' if kwargs.get('created', False) else 'update'
    statsd.incr('models.%s.%s.%s' % (meta.app_label, meta.object_name, action))
def stop(self, cancelled=False):
    """Stop (or cancel) this timer and refresh the linked case's billable time.

    Raises ValueError if the timer is already stopped, or if stopping
    (not cancelling) a timer that has no log entries.
    """
    statsd.incr('timer.stopped')
    if self.is_stopped():
        raise ValueError(u'The timer has already been stopped')
    last_log = self.log_set.order_by('created').last()  # get last log
    if not last_log and not cancelled:
        raise ValueError(u'You can\'t stop a timer without a log')
    # stop and update this model
    self.stopped = timezone.now()  # stop
    self.cancelled = cancelled
    if last_log:
        # The most recent log determines which case this time bills to.
        self.linked_case = last_log.case
    self.save()
    if self.linked_case:
        # update billable time on case: sum whole seconds across all
        # completed, non-cancelled timers for this case (raw SQL for
        # the aggregate over the interval).
        cursor = connection.cursor()
        cursor.execute('''
            select sum(ceiling(EXTRACT(epoch FROM a.stopped-a.created)))
            from timer_timer as a
            where a.cancelled = false
            and a.stopped is not null
            and a.linked_case_id = %s''', [self.linked_case.id])
        total_billable_time, = cursor.fetchone()
        if total_billable_time:
            self.linked_case.billable_time = total_billable_time
            # NOTE(review): this inner check duplicates the one above —
            # harmless but redundant.
            if total_billable_time:
                # statsd.timing expects milliseconds.
                statsd.timing('timer.total_time', total_billable_time * 1000)
            self.linked_case.save(update_fields=['billable_time'])
def email_mentor_notification(sender, instance, raw, **kwargs):
    """Notify mentor when his/her mentee changes mentor on his/her profile."""
    if not instance.mentor:
        return
    # Compare against the profile currently in the DB; bail if there is
    # no existing profile/mentor, or during fixture loading (raw=True).
    user_profile = get_object_or_none(UserProfile, user=instance.user)
    if not user_profile or not user_profile.mentor or raw:
        return
    if user_profile.mentor != instance.mentor:
        subject = '[Reps] Mentor reassignment.'
        email_template = 'emails/mentor_change_notification.txt'
        # Both the old and the new mentor are notified.
        mentors_recipients = [user_profile.mentor.id, instance.mentor.id]
        rep_recipient = [instance.user.id]
        ctx_data = {'rep_user': instance.user,
                    'new_mentor': instance.mentor}
        # Mentors' copy replies to the rep; rep's copy replies to the
        # new mentor.
        send_remo_mail.delay(recipients_list=mentors_recipients,
                             subject=subject,
                             email_template=email_template,
                             data=ctx_data,
                             headers={'Reply-To': instance.user.email})
        send_remo_mail.delay(recipients_list=rep_recipient,
                             subject=subject,
                             email_template=email_template,
                             data=ctx_data,
                             headers={'Reply-To': instance.mentor.email})
        statsd.incr('profiles.change_mentor')
def create_inactive_user(self, username, password, email,
                         locale=settings.LANGUAGE_CODE,
                         text_template=None, html_template=None,
                         subject=None, email_data=None,
                         volunteer_interest=False, **kwargs):
    """
    Create a new, inactive ``User`` and ``Profile``, generates a
    ``RegistrationProfile`` and email its activation key to the
    ``User``, returning the new ``User``.
    """
    new_user = User.objects.create_user(username, email, password)
    # Account stays inactive until the activation email is confirmed.
    new_user.is_active = False
    new_user.save()
    Profile.objects.create(user=new_user, locale=locale)
    registration_profile = self.create_profile(new_user)
    self.send_confirmation_email(
        registration_profile, text_template, html_template, subject,
        email_data, **kwargs)
    if volunteer_interest:
        # Track and group users who signed up as contributors.
        statsd.incr('user.registered-as-contributor')
        group = Group.objects.get(name=CONTRIBUTOR_GROUP)
        new_user.groups.add(group)
    return new_user
def find_or_create_user(email, fxa_uid):
    """Find a UserProfile by FxA uid (preferred) or email, or create one.

    Existing profiles get their fxa_uid/email refreshed and their login
    source migrated to FxA if needed. Returns (profile, created).
    """
    def find_user(**kwargs):
        # Return the matching profile or None instead of raising.
        try:
            return UserProfile.objects.get(**kwargs)
        except UserProfile.DoesNotExist:
            return None

    profile = find_user(fxa_uid=fxa_uid) or find_user(email=email)
    if profile:
        created = False
        profile.update(fxa_uid=fxa_uid, email=email)
    else:
        created = True
        profile = UserProfile.objects.create(
            fxa_uid=fxa_uid,
            email=email,
            source=mkt.LOGIN_SOURCE_FXA,
            # Default display name: the local part of the email address.
            display_name=email.partition('@')[0],
            is_verified=True)
    if profile.source != mkt.LOGIN_SOURCE_FXA:
        # Migrate pre-existing accounts over to the FxA login source.
        log.info('Set account to FxA for {0}'.format(email))
        statsd.incr('z.mkt.user.fxa')
        profile.update(source=mkt.LOGIN_SOURCE_FXA)
    return profile, created
def get_password_grant(self, request, data, client):
    """Validate a resource-owner password grant request.

    Returns the form's cleaned data on success; raises OAuthError with
    the form errors on failure. Either way the outcome is counted in
    statsd and logged with request metadata.
    """
    form = ClientIdPasswordGrantForm(data, client=client)
    # NOTE(security): the previous version logged the raw client_secret.
    # Secrets must never reach log files; record only whether one was
    # supplied, which is all that's needed for debugging.
    log_extra = {
        "IP": get_ip(request),
        "USERNAME": request.POST.get("username"),
        "CLIENT_SECRET_PRESENT": bool(request.POST.get("client_secret")),
        "HTTP_REFERER": request.META.get("HTTP_REFERER"),
        "HTTP_USER_AGENT": request.META.get("HTTP_USER_AGENT"),
    }
    if not form.is_valid():
        statsd.incr("login.failed")
        logger.info("login failed", extra=log_extra)
        form.on_form_invalid()
        raise OAuthError(form.errors)
    form.on_form_valid()
    statsd.incr("login.success")
    logger.info("login succeeded", extra=log_extra)
    return form.cleaned_data
def post(self, request, *args, **kwargs):
    """Authenticated endpoint that kicks off migrating a video to Panopto."""
    # for this situation, authenticator expects
    # the usual `hmac` and `nonce`, plus
    # `as` to give a username (WC needs to associate operations with users)
    # and `redirect_to` set to the video_id
    # (that prevents the token from being intercepted and changed
    # to migrate a different video than specified)
    authenticator = MediathreadAuthenticator(request.POST)
    if not authenticator.is_valid():
        statsd.incr("mediathread.auth_failure")
        return HttpResponse("invalid authentication token")
    user, created = User.objects.get_or_create(
        username=authenticator.username)
    if created:
        statsd.incr("mediathread.user_created")
    # remember, we're overloading the `redirect_to` field
    pk = authenticator.redirect_to
    video = get_object_or_404(Video, pk=pk)
    if video.has_panopto_source():
        # Already migrated; nothing to do.
        return HttpResponse('migration completed')
    folder = request.POST.get('folder', '')
    video.create_mediathread_update()
    submit_video_to_panopto(user, video, folder)
    return HttpResponse('ok')
def create_bango_transaction(sender, **kwargs):
    """Signal handler: record a pending Transaction for a Bango billing call.

    Only fires for the 'billing' resource; safer_get_or_create guards
    against duplicate transactions for the same uuid.
    """
    if sender.__class__._meta.resource_name != 'billing':
        return
    # Pull information from all the over the place.
    bundle = kwargs['bundle'].data
    data = kwargs['data']
    form = kwargs['form']
    seller_product = form.cleaned_data['seller_product_bango'].seller_product
    transaction, c = Transaction.objects.safer_get_or_create(
        uuid=data['transaction_uuid'],
        status=constants.STATUS_RECEIVED,
        provider=constants.SOURCE_BANGO,
        seller_product=seller_product)
    transaction.source = data.get('source', '')
    # uid_support will be set with the transaction id.
    # uid_pay is the uid of the billingConfiguration request.
    transaction.uid_pay = bundle['billingConfigurationId']
    transaction.status = constants.STATUS_PENDING
    transaction.type = constants.TYPE_PAYMENT
    transaction.save()
    # This shows up in syslog:
    log.info('Bango transaction: %s pending' % (transaction.pk,))
    # This does not! FIXME. bug 888075
    log.info('Created trans from Bango %s, uuid %s; pending'
             % (transaction.pk, transaction.uuid))
    statsd.incr('solitude.pending_transactions')
def dashboard_mozillians(request, user):
    """Mozillians dashboard: tracked functional areas and matching Reps.

    Handles two POST forms (track interests, email Reps by area), then
    builds per-interest maps of Reps, recent reports, past/current
    events and Rep availability for the template.
    """
    args = {}
    user_profile = user.userprofile
    interestform = forms.TrackFunctionalAreasForm(request.POST or None,
                                                  instance=user_profile)
    reps_email_form = forms.EmailRepsForm(request.POST or None)
    if interestform.is_valid():
        interestform.save()
        messages.success(request, 'Interests successfully saved')
        return redirect('dashboard')
    if reps_email_form.is_valid():
        # Email every Rep in the chosen functional area.
        functional_area = reps_email_form.cleaned_data['functional_area']
        reps = (User.objects
                .filter(groups__name='Rep')
                .filter(userprofile__functional_areas=functional_area))
        reps_email_form.send_email(request, reps)
        return redirect('dashboard')

    # Get the reps who match the specified interests
    interests = user.userprofile.tracked_functional_areas.all()
    tracked_interests = {}
    reps_past_events = {}
    reps_current_events = {}
    reps_ng_reports = {}
    today = now().date()
    unavailable_rep_exists = {}
    for interest in interests:
        # Get the Reps with the specified interest
        reps = User.objects.filter(groups__name='Rep').filter(
            userprofile__functional_areas=interest)
        tracked_interests[interest.name] = {
            'id': interest.id, 'reps': reps}
        # Get the reports of the Reps with the specified interest
        ng_reports = NGReport.objects.filter(report_date__lte=today,
                                             functional_areas=interest,
                                             user__in=reps)
        reps_ng_reports[interest.name] = (ng_reports
                                          .order_by('-report_date')[:10])
        # Get the events with the specified category
        events = Event.objects.filter(categories=interest)
        reps_past_events[interest.name] = events.filter(start__lt=now())[:50]
        reps_current_events[interest.name] = events.filter(start__gte=now())
        # Check if there is an unavailable Rep for the specific interest
        unavailable_val = (UserStatus.objects.filter(is_unavailable=True)
                           .filter(user__in=reps).exists())
        unavailable_rep_exists[interest.name] = unavailable_val

    args['unavailable_rep_exists'] = unavailable_rep_exists
    args['reps_ng_reports'] = reps_ng_reports
    args['interestform'] = interestform
    args['reps_past_events'] = reps_past_events
    args['reps_current_events'] = reps_current_events
    args['tracked_interests'] = tracked_interests
    args['reps_email_form'] = reps_email_form
    statsd.incr('dashboard.dashboard_mozillians')
    return render(request, 'dashboard_mozillians.html', args)
def pull_from_tahoe_and_submit_to_pcp(operation, params):
    """Download the operation's video from Tahoe storage and submit to PCP.

    Python 2 task function. Returns an operation status tuple; fails
    early if the video has no Tahoe URL or no workflow was specified.
    """
    statsd.incr("pull_from_tahoe_and_submit_to_pcp")
    print "pulling from tahoe"
    # NOTE(review): the incoming `params` argument is discarded; the
    # authoritative params are re-loaded from the operation itself.
    params = loads(operation.params)
    video_id = params['video_id']
    workflow = params['workflow']
    video = Video.objects.get(id=video_id)
    ouuid = operation.uuid
    url = video.tahoe_download_url()
    if url == "":
        return ("failed", "does not have a tahoe stored file")
    if workflow == "":
        return ("failed", "no workflow specified")
    filename = video.filename()
    suffix = video.extension()
    # Spool the download into a temp file and rewind it for upload.
    t = tempfile.NamedTemporaryFile(suffix=suffix)
    r = urllib2.urlopen(url)
    t.write(r.read())
    t.seek(0)
    operation.log(info="downloaded from tahoe")
    # TODO: figure out how to re-use submit_to_pcp()
    print "submitting to PCP"
    pcp = PCP(settings.PCP_BASE_URL, settings.PCP_USERNAME,
              settings.PCP_PASSWORD)
    filename = str(ouuid) + suffix
    print "submitted with filename %s" % filename
    title = "%s-%s" % (str(ouuid), strip_special_characters(video.title))
    print "submitted with title %s" % title
    pcp.upload_file(t, filename, workflow, title, video.description)
    return ("submitted", "submitted to PCP")
def create_update_passive_event_report(sender, instance, created, **kwargs):
    """Automatically create/update a passive report on event creation."""
    from remo.events.templatetags.helpers import get_event_link
    # Attributes shared by both the create and update paths.
    attrs = {
        'report_date': instance.start.date(),
        'longitude': instance.lon,
        'latitude': instance.lat,
        'location': "%s, %s, %s" % (instance.city, instance.region,
                                    instance.country),
        'link': get_event_link(instance),
        'activity_description': instance.description,
        'is_passive': True,
        'event': instance,
        'campaign': instance.campaign,
        'country': instance.country
    }
    if created:
        activity = Activity.objects.get(name=ACTIVITY_EVENT_CREATE)
        attrs.update({'user': instance.owner, 'activity': activity})
        NGReport.objects.create(**attrs)
        statsd.incr('reports.create_passive_event')
    else:
        # Post-event-metrics reports are managed elsewhere; skip them.
        reports = (NGReport.objects.filter(event=instance)
                   .exclude(activity__name=ACTIVITY_POST_EVENT_METRICS))
        reports.update(**attrs)
        # Change user and mentor to the appropriate reports
        attrs.update({'user': instance.owner,
                      'mentor': instance.owner.userprofile.mentor})
        reports.exclude(activity__name=ACTIVITY_EVENT_ATTEND).update(**attrs)
        statsd.incr('reports.update_passive_event')
def video_mediathread_submit(request, id):
    """GET: show Mediathread courses for the video; POST: enqueue a submit.

    NOTE(review): part of this function was redacted/garbled in this
    copy of the source ('******' below) — it most likely appended the
    username to the URL and conditionally set up credentials. Restore
    the original text from version control before editing.
    """
    video = get_object_or_404(Video, id=id)
    if request.method == "POST":
        statsd.incr("mediathread.submit")
        params = dict(set_course=request.POST.get('course', ''))
        o = Operation.objects.create(uuid=uuid.uuid4(),
                                     video=video,
                                     action="submit to mediathread",
                                     status="enqueued",
                                     params=dumps(params),
                                     owner=request.user,
                                     )
        maintasks.process_operation.delay(o.id, params)
        o.video.clear_mediathread_submit()
        return HttpResponseRedirect(video.get_absolute_url())
    try:
        url = (settings.MEDIATHREAD_BASE
               + "/api/user/courses?secret="
               + settings.MEDIATHREAD_SECRET
               + "&user="******"MEDIATHREAD_CREDENTIALS"):
        credentials = settings.MEDIATHREAD_CREDENTIALS
        response = GET(url, credentials=credentials)
        courses = loads(response)['courses']
        courses = [dict(id=k, title=v['title'])
                   for (k, v) in courses.items()]
        courses.sort(key=lambda x: x['title'].lower())
    except:
        # Best-effort: show an empty course list if the API call fails.
        courses = []
    return dict(video=video,
                courses=courses,
                mediathread_base=settings.MEDIATHREAD_BASE)
def pull_from_s3_and_submit_to_pcp(operation, params):
    """Download the operation's video from S3 and submit it to PCP.

    Python 2 task function; returns an operation status tuple.
    """
    statsd.incr("pull_from_s3_and_submit_to_pcp")
    print "pulling from S3"
    # NOTE(review): the incoming `params` argument is discarded; params
    # are re-loaded from the operation itself.
    params = loads(operation.params)
    video_id = params['video_id']
    workflow = params['workflow']
    video = Video.objects.get(id=video_id)
    ouuid = operation.uuid
    # NOTE(review): this filename is never used — it is overwritten
    # below with the ouuid-based name.
    filename = video.filename()
    suffix = video.extension()
    conn = boto.connect_s3(
        settings.AWS_ACCESS_KEY,
        settings.AWS_SECRET_KEY)
    bucket = conn.get_bucket(settings.AWS_S3_UPLOAD_BUCKET)
    k = Key(bucket)
    k.key = video.s3_key()
    # Spool the S3 object into a temp file and rewind it for upload.
    t = tempfile.NamedTemporaryFile(suffix=suffix)
    k.get_contents_to_file(t)
    t.seek(0)
    operation.log(info="downloaded from S3")
    # TODO: figure out how to re-use submit_to_pcp()
    print "submitting to PCP"
    pcp = PCP(settings.PCP_BASE_URL, settings.PCP_USERNAME,
              settings.PCP_PASSWORD)
    filename = str(ouuid) + suffix
    print "submitted with filename %s" % filename
    title = "%s-%s" % (str(ouuid), strip_special_characters(video.title))
    print "submitted with title %s" % title
    pcp.upload_file(t, filename, workflow, title, video.description)
    return ("submitted", "submitted to PCP")
def save_file_to_s3(operation, params): if not waffle.switch_is_active('enable_s3'): print "S3 uploads are disabled" return ("complete", "S3 uploads temporarily disabled") statsd.incr("save_file_to_s3") conn = boto.connect_s3( settings.AWS_ACCESS_KEY, settings.AWS_SECRET_KEY) bucket = conn.get_bucket(settings.AWS_S3_UPLOAD_BUCKET) k = Key(bucket) # make a YYYY/MM/DD directory to put the file in source_file = open(params['tmpfilename'], "rb") n = datetime.now() key = "%04d/%02d/%02d/%s" % ( n.year, n.month, n.day, os.path.basename(params['tmpfilename'])) k.key = key k.set_contents_from_file(source_file) source_file.close() f = File.objects.create(video=operation.video, url="", cap=key, location_type="s3", filename=params['filename'], label="uploaded source file (S3)") OperationFile.objects.create(operation=operation, file=f) return ("complete", "")
def mediathread(request):
    """HMAC-authenticated Mediathread entry point.

    Verifies the request signature, fetches/creates the user, stashes
    the auth parameters in the session and returns template context.
    """
    # check their credentials
    nonce = request.GET.get('nonce', '')
    hmc = request.GET.get('hmac', '')
    set_course = request.GET.get('set_course', '')
    username = request.GET.get('as')
    redirect_to = request.GET.get('redirect_url', '')
    verify = hmac.new(settings.MEDIATHREAD_SECRET,
                      '%s:%s:%s' % (username, redirect_to, nonce),
                      hashlib.sha1).hexdigest()
    if verify != hmc:
        statsd.incr("mediathread.auth_failure")
        return HttpResponse("invalid authentication token")
    # get_or_create closes the get/create race the old try/except had
    # when two first-time requests for the same user arrived at once,
    # and matches how the rest of the codebase creates these users.
    user, created = User.objects.get_or_create(username=username)
    if created:
        statsd.incr("mediathread.user_created")
    request.session['username'] = username
    request.session['set_course'] = set_course
    request.session['nonce'] = nonce
    request.session['redirect_to'] = redirect_to
    request.session['hmac'] = hmc
    audio = request.GET.get('audio', False)
    audio2 = request.GET.get('audio2', False)
    return dict(username=username,
                user=user,
                audio=audio or audio2,
                audio2=audio2,
                )
def pull_from_cuit_and_submit_to_pcp(operation, params):
    """Fetch the video's CUIT copy over SFTP and submit it to PCP.

    Python 2 task function; returns an operation status tuple.
    """
    statsd.incr("pull_from_cuit_and_submit_to_pcp")
    # NOTE(review): message says "tahoe" but this task pulls from CUIT —
    # looks like a copy/paste leftover (can't change the string here).
    print "pulling from tahoe"
    params = loads(operation.params)
    video_id = params['video_id']
    workflow = params['workflow']
    video = Video.objects.get(id=video_id)
    if workflow == "":
        return ("failed", "no workflow specified")
    ouuid = operation.uuid
    cuit_file = video.file_set.filter(video=video, location_type="cuit")[0]
    filename = cuit_file.filename
    extension = os.path.splitext(filename)[1]
    tmpfilename = os.path.join(settings.TMP_DIR, str(ouuid) + extension)
    sftp_get(filename, tmpfilename)
    operation.log(info="downloaded from cuit")
    print "submitting to PCP"
    pcp = PCP(settings.PCP_BASE_URL, settings.PCP_USERNAME,
              settings.PCP_PASSWORD)
    filename = str(ouuid) + extension
    print "submitted with filename %s" % filename
    title = "%s-%s" % (str(ouuid), strip_special_characters(video.title))
    print "submitted with title %s" % title
    pcp.upload_file(open(tmpfilename, "r"), filename, workflow, title,
                    video.description)
    return ("submitted", "submitted to PCP")
def automated_poll(sender, instance, **kwargs):
    """Create a radio poll automatically.

    If a bug lands in our database with council_vote_requested, create
    a new Poll and let Council members vote.
    """
    if ((not instance.council_vote_requested or
         Poll.objects.filter(bug=instance).exists())):
        return
    # NOTE(review): the bot username was redacted in this copy of the
    # source; restore from version control.
    remobot = User.objects.get(username='******')
    with transaction.commit_on_success():
        # NOTE(review): `start` omits days= while `end` passes it —
        # confirm both are intended as day offsets.
        poll = (Poll.objects
                .create(name=instance.summary,
                        description=instance.first_comment,
                        valid_groups=Group.objects.get(name='Council'),
                        start=(now() +
                               timedelta(BUDGET_REQUEST_PERIOD_START)),
                        end=(now() +
                             timedelta(days=BUDGET_REQUEST_PERIOD_END)),
                        bug=instance,
                        created_by=remobot,
                        automated_poll=True))
        radio_poll = RadioPoll.objects.create(poll=poll,
                                              question='Budget Approval')
        RadioPollChoice.objects.create(answer='Approved',
                                       radio_poll=radio_poll)
        RadioPollChoice.objects.create(answer='Denied',
                                       radio_poll=radio_poll)
        statsd.incr('voting.create_automated_poll')
def fetch_snippets(request, **kwargs):
    """Determine which snippet-fetching method to use."""
    statsd.incr('serve.snippets')
    handler = (fetch_pregenerated_snippets if settings.SERVE_SNIPPET_BUNDLES
               else fetch_render_snippets)
    return handler(request, **kwargs)
def create_update_passive_event_report(sender, instance, created, **kwargs):
    """Automatically create/update a passive report on event creation."""
    from remo.events.helpers import get_event_link
    # Attributes shared by both the create and update paths.
    attrs = {
        'report_date': instance.start.date(),
        'longitude': instance.lon,
        'latitude': instance.lat,
        'location': "%s, %s, %s" % (instance.city, instance.region,
                                    instance.country),
        'link': get_event_link(instance),
        'activity_description': instance.description}
    if created:
        activity = Activity.objects.get(name=ACTIVITY_EVENT_CREATE)
        attrs.update({
            'user': instance.owner,
            'event': instance,
            'activity': activity,
            'is_passive': True})
        report = NGReport.objects.create(**attrs)
        # Mirror the event's categories onto the report.
        report.functional_areas.add(*instance.categories.all())
        statsd.incr('reports.create_passive_event')
    else:
        # Post-event-metrics reports are managed elsewhere; skip them.
        reports = (NGReport.objects.filter(event=instance)
                   .exclude(activity__name=ACTIVITY_POST_EVENT_METRICS))
        reports.update(**attrs)
        statsd.incr('reports.update_passive_event')
def log_staff_modified(sender, instance, **kwargs):
    """pre-save handler: log, count and email when specialist staff changes.

    Skips brand-new rows (no existing pk in the DB); creation is handled
    by the separate 'created' handler.
    """
    try:
        sender.objects.get(pk=instance.pk)
    except sender.DoesNotExist:
        # New object, not a modification.
        return
    statsd.incr("specialiststaff.modified")
    logger.info(
        "Specialist user modified",
        extra={
            "USERNAME": instance.user.username,
            "PROVIDER": instance.provider.name,
            "IS_MANAGER": unicode(instance.is_manager),
        },
    )
    message = email_template.format(
        "modified",
        date_format(localtime(now()), "SHORT_DATETIME_FORMAT"),
        instance.user.username,
        instance.provider.name,
        unicode(instance.is_manager),
    )
    send_mail(
        "Specialist user modified",
        message,
        settings.EMAIL_FROM_ADDRESS,
        settings.OPERATOR_USER_ALERT_EMAILS,
        fail_silently=True,
    )
def send_activation_email(request):
    """Re-send the account activation email for the submitted address."""
    form = SendActivationEmailForm(request.POST)
    if not form.is_valid():
        return send_activation_email_page(request)
    email = form.cleaned_data['email']
    users = User.objects.filter(email=email)
    # Don't reveal if we don't have that email, to prevent email harvesting
    if len(users) == 1:
        user = users[0]
        if user.is_active:
            # Already activated: nothing to resend.
            return redirect('activated')
        site = Site.objects.get_current()
        try:
            registration_profile = RegistrationProfile.objects.get(user=user)
            registration_profile.send_activation_email(site)
            statsd.incr('email.message.types.resend_activation')
        except RegistrationProfile.DoesNotExist:
            # TODO: Generate a Registration Profile and send the email.
            # This is not likely to happen, because any user created through
            # Django registration should have a profile
            pass
    # Same response whether or not the email matched an account.
    return redirect('activation_email_sent')
def refresh_token(self, force_refresh=False): """ Called from many different places right before executing a SOAP call """ # If we don't already have a token or the token expires within 5 min(300 seconds), get one self.refresh_auth_tokens_from_cache() if force_refresh or self.authToken is None or self.token_is_expired(): payload = { 'clientId': self.client_id, 'clientSecret': self.client_secret, 'accessType': 'offline', } if self.refreshKey: payload['refreshToken'] = self.refreshKey token_response = self.request_token(payload) statsd.incr('news.backends.sfmc.auth_token_refresh') self.authToken = token_response['accessToken'] self.authTokenExpiresIn = token_response['expiresIn'] self.authTokenExpiration = time() + self.authTokenExpiresIn self.internalAuthToken = token_response['legacyToken'] if 'refreshToken' in token_response: self.refreshKey = token_response['refreshToken'] self.build_soap_client() self.cache_auth_tokens()
def add(request):
    """Displays/processes a form to create a collection."""
    data = {}
    if request.method == 'POST':
        form = forms.CollectionForm(
            request.POST, request.FILES,
            initial=initial_data_from_request(request))
        aform = forms.AddonsForm(request.POST)
        if form.is_valid():
            collection = form.save(default_locale=request.LANG)
            collection.save()
            if aform.is_valid():
                aform.save(collection)
            collection_message(request, collection, 'add')
            statsd.incr('collections.created')
            log.info('Created collection %s' % collection.id)
            return http.HttpResponseRedirect(collection.get_url_path())
        else:
            # Re-render with the addons/comments the user had selected.
            data['addons'] = Addon.objects.filter(pk__in=aform.clean_addon())
            data['comments'] = aform.clean_addon_comment()
    else:
        form = forms.CollectionForm()
    data.update(form=form, filter=get_filter(request))
    return render_cat(request, 'bandwagon/add.html', data)
def save(self, *args, **kwargs):
    """Create post event data report."""
    event = super(PostEventForm, self).save()
    activity = Activity.objects.get(name=ACTIVITY_POST_EVENT_METRICS)
    reports = NGReport.objects.filter(event=event, activity=activity)
    if not reports:
        # First save with metrics: create the passive post-event report,
        # located at the owner's profile coordinates.
        up = event.owner.userprofile
        attrs = {
            'activity': activity,
            'report_date': get_date(),
            'longitude': up.lon,
            'latitude': up.lat,
            'location': '%s, %s, %s' % (up.city, up.region, up.country),
            'link': get_event_link(event),
            'is_passive': True,
            'event': event,
            'user': event.owner
        }
        report = NGReport.objects.create(**attrs)
        report.functional_areas.add(*event.categories.all())
        statsd.incr('reports.create_passive_post_event_metrics')
    return event
def log_staff_created(sender, instance, created, **kwargs):
    """post-save handler: log, count and email when specialist staff is added."""
    if created:
        statsd.incr("specialiststaff.created")
        logger.info(
            "Specialist user created",
            extra={
                "USERNAME": instance.user.username,
                "PROVIDER": instance.provider.name,
                "IS_MANAGER": unicode(instance.is_manager),
            },
        )
        message = email_template.format(
            "created",
            date_format(localtime(now()), "SHORT_DATETIME_FORMAT"),
            instance.user.username,
            instance.provider.name,
            unicode(instance.is_manager),
        )
        send_mail(
            "Specialist user added",
            message,
            settings.EMAIL_FROM_ADDRESS,
            settings.OPERATOR_USER_ALERT_EMAILS,
            fail_silently=True,
        )
def post(self, request, question_id):
    """Create an answer for a question and publish an SNS notification.

    Returns 201 with the serialized answer, 400 on validation errors,
    404 when the question does not exist.
    """
    statsd.incr('view_question_n_answer_views_AnswerList_POST')
    try:
        q = Question.objects.get(pk=question_id)
    except (Question.DoesNotExist, ValidationError):
        raise Http404
    serializer = AnswerSerializer(data=request.data)
    if serializer.is_valid():
        serializer.save(question_id=q.pk, user_id=request.user.pk)
        # TODO
        if not settings.TESTING:
            # Notify subscribers (e.g. the question creator) via SNS.
            answer_id = serializer.data.get('answer_id')
            sns_msg = {
                'on': 'question_answered',
                'question_id': question_id,
                'question_creator_email': q.user.username,
                'question_text': q.question_text,
                'question_url': rest_reverse('get_put_del_a_question',
                                             args=[question_id],
                                             request=request),
                'answer_id': answer_id,
                'answer_text': serializer.data.get('answer_text'),
                'answer_url': rest_reverse('get_put_del_an_answer',
                                           args=[question_id, answer_id],
                                           request=request)
            }
            sns.publish(TopicArn=settings.AWS_SNS_TOPIC_ARN,
                        Message=json.dumps(sns_msg))
        else:
            print('Testing - Answer a question')
        return Response(serializer.data, status=status.HTTP_201_CREATED)
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def log_operator_created(sender, instance, created, **kwargs):
    """post-save handler: log, count and email when an operator is added."""
    if not created:
        return
    statsd.incr('operator.created')
    logger.info('Operator user created', extra={
        'USERNAME': instance.user.username,
        'IS_MANAGER': unicode(instance.is_manager)
    })
    timestamp = date_format(localtime(now()), 'SHORT_DATETIME_FORMAT')
    message = email_template.format(
        'created',
        timestamp,
        instance.user.username,
        unicode(instance.is_manager),
    )
    send_mail('Operator user added', message,
              settings.EMAIL_FROM_ADDRESS,
              settings.OPERATOR_USER_ALERT_EMAILS,
              fail_silently=True)
def get_queryset(self):
    """Return discopane items with their Addon objects attached.

    Items whose add-on is not public (missing from the bulk fetch) are
    dropped, but counted in statsd so the gap is visible.
    """
    ids = [item.addon_id for item in self.get_discopane_items()]
    # FIXME: Implement using ES. It would look like something like this,
    # with a specific serializer that inherits from the ES one + code to
    # build the dict:
    # es = amo.search.get_es()
    # es.mget({'ids': ids}, index=AddonIndexer.get_index_alias(),
    #         doc_type=AddonIndexer.get_doctype_name())
    addons = Addon.objects.public().in_bulk(ids)

    # Patch items to add addons.
    result = []
    for item in self.get_discopane_items():
        addon = addons.get(item.addon_id)
        if addon is None:
            # Ignore this missing add-on, but increment a counter so we
            # know something happened.
            statsd.incr('discovery.api.missing_item')
        else:
            item.addon = addon
            result.append(item)
    return result
def approve(self, version): """Do the approval itself, caling ReviewHelper to change the status, sign the files, send the e-mail, etc.""" # Note: this should automatically use the TASK_USER_ID user. helper = ReviewHelper(addon=version.addon, version=version) if version.channel == amo.RELEASE_CHANNEL_LISTED: helper.handler.data = { # The comment is not translated on purpose, to behave like # regular human approval does. 'comments': 'This version has been screened and approved for the ' 'public. Keep in mind that other reviewers may look into ' 'this version in the future and determine that it ' 'requires changes or should be taken down. In that case, ' 'you will be notified again with details and next steps.' '\r\n\r\nThank you!' } else: helper.handler.data = {'comments': 'automatic validation'} helper.handler.process_public() statsd.incr('reviewers.auto_approve.approve.success')
def push(request, object_id):
    """Create a new push for a deployment from POSTed form data.

    Any POST keys of the form 'flag_<id>' become FlagValue rows on the
    new push.
    """
    deployment = get_object_or_404(Deployment, id=object_id)
    if request.method == "POST":
        if deployment.can_push(request.user):
            statsd.incr('event.push')
            push = deployment.new_push(user=request.user,
                                       comment=request.POST.get('comment', ''))
            for k in request.POST.keys():
                if k.startswith("flag_"):
                    flag_id = k[len("flag_"):]
                    flag = Flag.objects.get(id=flag_id)
                    value = request.POST[k]
                    FlagValue.objects.create(flag=flag, push=push,
                                             value=value)
            if request.POST.get('step', ''):
                return HttpResponseRedirect(push.get_absolute_url() +
                                            "?step=1")
            else:
                return HttpResponseRedirect(push.get_absolute_url())
        # NOTE(review): a POST from a user without push permission falls
        # through here and returns None — confirm intended behavior.
    else:
        return HttpResponse("POST requests, only, please")
def create_passive_attendance_report(sender, instance, **kwargs):
    """Automatically create a passive report after event attendance save."""
    # Only Reps get an attendance report.
    if instance.user.groups.filter(name='Rep').exists():
        activity = Activity.objects.get(name=ACTIVITY_EVENT_ATTEND)
        attrs = {
            'user': instance.user,
            'event': instance.event,
            'activity': activity,
            'report_date': instance.event.start.date(),
            'longitude': instance.event.lon,
            'latitude': instance.event.lat,
            'location': "%s, %s, %s" % (instance.event.city,
                                        instance.event.region,
                                        instance.event.country),
            'is_passive': True,
            'link': get_event_link(instance.event),
            'activity_description': instance.event.description}
        report = NGReport.objects.create(**attrs)
        # Mirror the event's categories onto the report.
        report.functional_areas.add(*instance.event.categories.all())
        statsd.incr('reports.create_passive_attendance')
def handle(self, **options):
    """Management command: point each Document at its newest approved
    revision, saving only documents whose current revision is stale."""
    try:
        # Sends all writes to the master DB. Slaves are readonly.
        pin_this_thread()
        docs = Document.objects.all()
        for d in docs:
            # Newest approved revision (by review date), if any.
            revs = Revision.objects.filter(document=d, is_approved=True)
            revs = list(revs.order_by('-reviewed')[:1])
            if len(revs):
                rev = revs[0]
                if d.current_revision != rev:
                    d.current_revision = rev
                    d.save()
                    print d.get_absolute_url()
                    statsd.incr('wiki.cron.fix-current-revision')
    finally:
        unpin_this_thread()
def post(request, content, locale=settings.LANGUAGE_CODE):
    """POST `content` to the kumascript service; return (body, errors)."""
    ks_url = settings.KUMASCRIPT_URL_TEMPLATE.format(path='')
    headers = {'X-FireLogger': '1.2'}
    # Pass the site origin and locale through as env headers.
    add_env_headers(headers, dict(url=request.build_absolute_uri('/'),
                                  locale=locale))
    data = content.encode('utf8')
    statsd.incr('wiki.ks_post')
    with statsd.timer('wiki.ks_post'):
        resp = requests.post(ks_url,
                             timeout=constance.config.KUMASCRIPT_TIMEOUT,
                             data=data,
                             headers=headers)
    if not resp:
        # Falsy response (error status): hand back the original content
        # with a timeout error marker.
        return content, KUMASCRIPT_TIMEOUT_ERROR
    return process_body(resp), process_errors(resp)
def call_recommendation_server(telemetry_id, locale, platform):
    """Fetch addon recommendations for a client; return [] on failure."""
    # Only pass query parameters that actually have a value.
    params = [(name, val)
              for name, val in (('locale', locale), ('platform', platform))
              if val]
    query = '%s%s' % ('?' if params else '', urlencode(params))
    endpoint = urlparse.urljoin(settings.RECOMMENDATION_ENGINE_URL,
                                '%s/%s' % (telemetry_id, query))
    log.debug(u'Calling recommendation server: {0}'.format(endpoint))
    try:
        with statsd.timer('services.recommendations'):
            response = requests.get(
                endpoint, timeout=settings.RECOMMENDATION_ENGINE_TIMEOUT)
            if response.status_code != 200:
                # Funnel non-200 into the same failure path as network
                # errors.
                raise requests.exceptions.RequestException()
    except requests.exceptions.RequestException as e:
        log.error(u'Calling recommendation engine failed: {0}'.format(e))
        statsd.incr('services.recommendations.fail')
        return []
    statsd.incr('services.recommendations.success')
    return json.loads(response.content).get('results', [])
def delete_thread(request, document_slug, thread_id):
    """Delete a thread."""
    doc = get_document(document_slug, request)
    thread = get_object_or_404(Thread, pk=thread_id, document=doc)

    if request.method == 'GET':
        # Render the confirmation page
        context = {'document': doc, 'thread': thread}
        return render(request, 'kbforums/confirm_thread_delete.html',
                      context)

    # Handle confirm delete form POST
    log.warning('User %s is deleting KB thread with id=%s' %
                (request.user, thread.id))
    thread.delete()
    statsd.incr('kbforums.delete_thread')
    return HttpResponseRedirect(
        reverse('wiki.discuss.threads', args=[document_slug]))
def send_first_report_notification():
    """Send inactivity notification after 4 weeks."""
    today = get_date()
    start = today - timedelta(weeks=4)
    end = today + timedelta(weeks=4)

    candidates = User.objects.filter(
        groups__name='Rep',
        userprofile__registration_complete=True,
        userprofile__first_report_notification__isnull=True,
        userprofile__is_unavailable=False)
    # Exclude users with a report filed between start and end period
    # and users who joined the program less than one month ago.
    inactive = candidates.exclude(
        ng_reports__report_date__range=[start, end])
    inactive = inactive.exclude(userprofile__date_joined_program__gt=start)

    send_report_notification(inactive, weeks=4)
    # Stamp each profile so the user is never notified twice.
    for user in inactive:
        profile = user.userprofile
        profile.first_report_notification = today
        profile.save()
    statsd.incr('reports.send_first_report_notification')
def index_task(cls, id_list, **kw):
    """Index documents specified by cls and ids.

    ``cls`` is a mapping-type class providing get_mapping_type_name,
    get_model, extract_document, index and unindex; ``id_list`` is the
    primary keys to (re)index. On any exception the whole task retries
    with an escalating countdown from RETRY_TIMES, up to MAX_RETRIES.
    """
    statsd.incr("search.tasks.index_task.%s" % cls.get_mapping_type_name())
    try:
        # Pin to master db to avoid replication lag issues and stale
        # data.
        pin_this_thread()
        # Re-query the pks so we only index rows that still exist.
        qs = cls.get_model().objects.filter(pk__in=id_list).values_list(
            "pk", flat=True)
        for id_ in qs:
            try:
                cls.index(cls.extract_document(id_), id_=id_)
            except UnindexMeBro:
                # If extract_document throws this, then we need to
                # remove this item from the index.
                cls.unindex(id_)
    except Exception as exc:
        retries = index_task.request.retries
        if retries >= MAX_RETRIES:
            # Some exceptions aren't pickleable and we need this to
            # throw things that are pickleable.
            raise IndexingTaskError()
        # Count total retries and retries at this backoff level.
        statsd.incr("search.tasks.index_task.retry", 1)
        statsd.incr("search.tasks.index_task.retry%d" % RETRY_TIMES[retries],
                    1)
        index_task.retry(exc=exc, max_retries=MAX_RETRIES,
                         countdown=RETRY_TIMES[retries])
    finally:
        unpin_this_thread()
def _call_salesforce(self, method, url, **kwargs):
    """Wrap the parent _call_salesforce with session refresh, call
    counters and Salesforce API-usage gauges."""
    if self.session_is_expired():
        self.refresh_session()
    kwargs['timeout'] = settings.SFDC_REQUEST_TIMEOUT
    try:
        statsd.incr('news.backends.sfdc.call_salesforce')
        resp = super(RefreshingSFType, self)._call_salesforce(
            method, url, **kwargs)
    except sfapi.SalesforceExpiredSession:
        # Session died mid-call: refresh once and retry the request.
        statsd.incr('news.backends.sfdc.call_salesforce')
        statsd.incr('news.backends.sfdc.session_expired')
        self.refresh_session()
        resp = super(RefreshingSFType, self)._call_salesforce(
            method, url, **kwargs)

    limit_info = resp.headers.get('sforce-limit-info')
    if limit_info:
        usage = sfapi.Salesforce.parse_api_usage(limit_info).get('api-usage')
        if usage:
            statsd.gauge('news.backends.sfdc.daily_api_used',
                         int(usage.used), rate=0.5)
            statsd.gauge('news.backends.sfdc.daily_api_limit',
                         int(usage.total), rate=0.5)
            statsd.gauge('news.backends.sfdc.percent_daily_api_used',
                         float(usage.used) / float(usage.total) * 100,
                         rate=0.5)
    return resp
def update_entry(self, entry):
    """Insert a feedparser ``entry`` into this feed unless we already
    have it (deduped by guid).

    Best-effort: creation/fanout errors are counted and printed, never
    raised. (Python 2 syntax.)
    """
    statsd.incr("update_entry")
    guid = get_entry_guid(entry)
    if not guid:
        # no guid? can't do anything with it
        return
    # guid column is 256 chars wide, so dedupe on the truncated value.
    r = self.entry_set.filter(guid=guid[:256])
    if r.count() > 0:
        # already have this one, so nothing to do
        return
    published = extract_published(entry)
    try:
        statsd.incr("create_entry")
        # Field values are truncated to their column widths.
        e = Entry.objects.create(
            feed=self,
            guid=guid[:256],
            link=entry.get('link', u"")[:200],
            title=entry.get('title', u"no title")[:256],
            description=entry.get(
                'description', entry.get('summary', u"")),
            author=entry.get('author', u"")[:256],
            published=published,
        )
        e.fanout()
    except Exception, e:
        statsd.incr("create_entry_exception")
        print str(e)
def lookup(self, address):
    """Resolve an IP address to a block of geo information.

    If a given address is unresolvable or the geoip server is
    not defined, return the default as defined by the settings,
    or "worldwide".
    """
    if not (self.url and waffle.switch_is_active('geoip-geodude')):
        return self.default_val

    with statsd.timer('z.geoip'):
        res = None
        try:
            res = requests.post('{0}/country.json'.format(self.url),
                                timeout=self.timeout,
                                data={'ip': address})
        except requests.Timeout:
            statsd.incr('z.geoip.timeout')
            log.error(
                ('Geodude timed out looking up: {0}'.format(address)))
        except requests.RequestException as e:
            statsd.incr('z.geoip.error')
            log.error('Geodude connection error: {0}'.format(str(e)))
        if res is not None and res.status_code == 200:
            statsd.incr('z.geoip.success')
            return res.json().get('country_code',
                                  self.default_val).lower()
    return self.default_val
def authenticate(self, request):
    """
    Returns a `User` if a correct username and password have been
    supplied in the HTTP Post authentication. Otherwise returns `None`.
    """
    data = request.DATA
    userid = data.get('CHSUserName')
    password = data.get('CHSPassword')
    # Fall back to the historically misspelled key.
    org = data.get('CHSOrganisationID', data.get('CHSOrgansationID'))

    if not all([userid, password, org]):
        statsd.incr('provider_extract.malformed')
        return None

    try:
        staff = Staff.objects.get(chs_user=userid, chs_organisation=org)
    except (Staff.DoesNotExist, Staff.MultipleObjectsReturned):
        statsd.incr('provider_extract.auth_failed')
        raise AuthenticationFailed('Invalid username/password')

    user = staff.user
    credentials_ok = (user is not None and user.is_active and
                      check_password(password, staff.chs_password))
    if not credentials_ok:
        statsd.incr('provider_extract.auth_failed')
        raise AuthenticationFailed('Invalid username/password')
    return (user, None)
def handle(self, *args, **options):
    """Refresh all users from the FUM API, page by page, skipping users
    whose serialized payload hash is unchanged since the last run.
    """
    api = slumber.API(settings.FUM_API_ENDPOINT,
                      auth=_slumber_auth.TokenAuth(
                          settings.FUM_ACCESS_TOKEN))
    cache = get_cache("user_hashes")
    c = 1  # FUM API pages are 1-indexed.
    while True:
        data = api.users.get(page=c)
        for user in data["results"]:
            # Hash the whole serialized record to detect any change.
            user_hash = hashlib.sha512(str(user)).hexdigest()
            cache_key = "user-refresh-hash-%s" % user["username"]
            stored_hash = cache.get(cache_key)
            if stored_hash == user_hash:
                sd.incr(
                    "login_frontend.management.refresh_users.no_changes")
                continue
            # TTL is ~1 day plus jitter so entries don't expire at once.
            cache.set(cache_key, user_hash, 86400 +
                      random.randint(0, 7200))
            sd.incr("login_frontend.management.refresh_users.refresh")
            status = refresh_user(user)
            if status:
                sd.incr("login_frontend.management.refresh_users.updated")
                self.stdout.write('Refreshed %s (%s, %s, %s)' %
                                  (user.get("username"),
                                   user.get("email"),
                                   user.get("phone1"),
                                   user.get("phone2")))
        # Stop when the API reports no further page.
        if "next" not in data or data["next"] is None:
            break
        c += 1
    self.stdout.write('Successfully fetched all users')
def run_yara(results, upload_pk):
    """
    Apply a set of Yara rules on a FileUpload and store the Yara results
    (matches).

    This task is intended to be run as part of the submission process
    only. When a version is created from a FileUpload, the files are
    removed. In addition, we usually delete old FileUpload entries after
    180 days.

    - `results` are the validation results passed in the validation
      chain. This task is a validation task, which is why it must
      receive the validation results as first argument.
    - `upload_pk` is the FileUpload ID.
    """
    log.info('Starting yara task for FileUpload %s.', upload_pk)

    if not results['metadata']['is_webextension']:
        log.info('Not running yara for FileUpload %s, it is not a '
                 'webextension.', upload_pk)
        return results

    upload = FileUpload.objects.get(pk=upload_pk)
    try:
        scanner_result = ScannerResult(upload=upload, scanner=YARA)
        with statsd.timer('devhub.yara'):
            rules = yara.compile(filepath=settings.YARA_RULES_FILEPATH)
            zip_file = SafeZip(source=upload.path)
            for zip_info in zip_file.info_list:
                if zip_info.is_dir():
                    continue
                file_content = zip_file.read(zip_info).decode(
                    errors='ignore')
                for match in rules.match(data=file_content):
                    # Add the filename to the meta dict.
                    scanner_result.add_yara_result(
                        rule=match.rule,
                        tags=match.tags,
                        meta={**match.meta, 'filename': zip_info.filename})
            zip_file.close()
        scanner_result.save()
        if scanner_result.has_matches:
            statsd.incr('devhub.yara.has_matches')
        statsd.incr('devhub.yara.success')
        log.info('Ending scanner "yara" task for FileUpload %s.',
                 upload_pk)
    except Exception:
        statsd.incr('devhub.yara.failure')
        # We log the exception but we do not raise to avoid perturbing the
        # submission flow.
        log.exception('Error in scanner "yara" task for FileUpload %s.',
                      upload_pk)
    return results
def _call_salesforce(self, method, url, **kwargs):
    """Call the parent implementation with a request timeout, refreshing
    the session up-front (and once more on expiry), and record
    API-usage gauges."""
    if self.session_is_expired():
        self.refresh_session()
    kwargs["timeout"] = settings.SFDC_REQUEST_TIMEOUT
    try:
        statsd.incr("news.backends.sfdc.call_salesforce")
        resp = super()._call_salesforce(method, url, **kwargs)
    except sfapi.SalesforceExpiredSession:
        # Expired mid-call: refresh and retry exactly once.
        statsd.incr("news.backends.sfdc.call_salesforce")
        statsd.incr("news.backends.sfdc.session_expired")
        self.refresh_session()
        resp = super()._call_salesforce(method, url, **kwargs)

    usage = self.api_usage.get("api-usage") if self.api_usage else None
    if usage:
        statsd.gauge(
            "news.backends.sfdc.daily_api_used",
            int(usage.used),
            rate=0.5,
        )
        statsd.gauge(
            "news.backends.sfdc.daily_api_limit",
            int(usage.total),
            rate=0.5,
        )
        statsd.gauge(
            "news.backends.sfdc.percent_daily_api_used",
            float(usage.used) / float(usage.total) * 100,
            rate=0.5,
        )
    return resp
def process_request(self, request):
    """Count Do-Not-Track header states (unset / on / off) in statsd."""
    dnt = request.META.get('HTTP_DNT')
    if dnt is None:
        statsd.incr('z.mkt.dnt.unset')
    elif dnt == '1':
        statsd.incr('z.mkt.dnt.on')
    else:
        statsd.incr('z.mkt.dnt.off')
def _documents_for(locale, topics=None, products=None):
    """Returns a list of articles that apply to passed in topics and
    products.
    """
    cache_key = _documents_for_cache_key(locale, topics, products)

    # First try to get the results from the cache
    documents = cache.get(cache_key)
    if documents:
        statsd.incr('wiki.facets.documents_for.cache')
        return documents

    try:
        # Then try ES
        documents = _es_documents_for(locale, topics, products)
        cache.add(cache_key, documents)
        statsd.incr('wiki.facets.documents_for.es')
    except TransportError:
        # Finally, hit the database (through cache machine)
        # NOTE: The documents will be the same ones returned by ES
        # but they won't be in the correct sort (by votes in the last
        # 30 days). It is better to return them in the wrong order
        # than not to return them at all.
        documents = _db_documents_for(locale, topics, products)
        statsd.incr('wiki.facets.documents_for.db')

    return documents
def call_adzerk_server(placeholders):
    """Call adzerk server to get sponsored addon results.

    `placeholders` is a list of arbitrary strings that we pass so we can
    identify the order of the results in the response dict."""
    placements = []
    for ph in placeholders:
        placements.append({
            "divName": ph,
            "networkId": settings.ADZERK_NETWORK_ID,
            "siteId": settings.ADZERK_SITE_ID,
            "adTypes": [5],
        })
    json_response = {}
    try:
        log.info('Calling adzerk')
        with statsd.timer('services.adzerk'):
            response = requests.post(settings.ADZERK_URL,
                                     json={'placements': placements},
                                     timeout=settings.ADZERK_TIMEOUT)
            if response.status_code != 200:
                raise requests.exceptions.RequestException()
        json_response = response.json()
    except requests.exceptions.RequestException as e:
        log.exception('Calling adzerk failed: %s', e)
        statsd.incr('services.adzerk.fail')
    except ValueError as e:
        # response.json() failed to decode.
        log.exception('Decoding adzerk response failed: %s', e)
        statsd.incr('services.adzerk.fail')
    else:
        statsd.incr('services.adzerk.success')
    return json_response
def process_common_voice_batch():
    """Process a chunk of queued Common Voice updates, deduping to the
    newest update per email, then ack the chunk and prune old rows."""
    if not settings.COMMON_VOICE_BATCH_PROCESSING:
        return

    updates = CommonVoiceUpdate.objects.filter(
        ack=False)[:settings.COMMON_VOICE_BATCH_CHUNK_SIZE]

    # Keep only the most recent update per email address.
    # last_active_date is when the update was sent basically, so we can
    # use it for ordering.
    latest = {}
    for update in updates:
        data = update.data
        email = data["email"]
        last_active = isoparse(data["last_active_date"])
        if email in latest and latest[email]["last_active"] > last_active:
            continue
        latest[email] = {"last_active": last_active, "data": data}

    for info in latest.values():
        record_common_voice_update.delay(info["data"])

    # Ack one at a time to ensure that we don't ack new ones that have
    # come in since we started.
    for update in updates:
        update.ack = True
        update.save()

    statsd.incr("news.tasks.process_common_voice_batch.all_updates",
                len(updates))

    # delete ack'd updates more than 24 hours old
    cutoff = now() - timedelta(hours=24)
    deleted, _ = CommonVoiceUpdate.objects.filter(
        ack=True, when__lte=cutoff).delete()
    statsd.incr("news.tasks.process_common_voice_batch.deleted", deleted)
    statsd.gauge(
        "news.tasks.process_common_voice_batch.queue_volume",
        CommonVoiceUpdate.objects.filter(ack=False).count(),
    )
def process_subhub_event_customer_created(data):
    """
    Event name: customer.created

    Creates or updates a SFDC customer when a new payment
    processor/Stripe customer is created
    """
    statsd.incr('news.tasks.process_subhub_event.customer_created')
    first, last = split_name(data['name'])
    contact_data = {'fxa_id': data['user_id'],
                    'payee_id': data['customer_id']}
    user_data = get_user_data(email=data['email'])

    if not user_data:
        # No existing SFDC user: create one with the full contact info.
        contact_data['email'] = data['email']
        contact_data['first_name'] = first
        contact_data['last_name'] = last
        statsd.incr(
            'news.tasks.process_subhub_event.customer_created.created')
        sfdc.add(contact_data)
        return

    # User found in sfdc; see if we should update their name(s).
    # A last name of '_' is a placeholder, so replace it.
    if user_data['last_name'] == '_':
        contact_data['last_name'] = last
    # If the current first name is blank/Null, fill it in.
    if not user_data['first_name']:
        contact_data['first_name'] = first
    sfdc.update(user_data, contact_data)
    statsd.incr('news.tasks.process_subhub_event.customer_created.updated')
def amo_sync(request, post_type):
    """Queue an AMO sync task of `post_type` from a JSON POST body.

    Unknown types 404, missing/invalid API key 401s, bad JSON 400s
    (and is reported to Sentry)."""
    task = AMO_SYNC_TYPES.get(post_type)
    if task is None:
        return HttpResponseJSON(
            {
                "status": "error",
                "desc": "API URL not found",
                "code": errors.BASKET_USAGE_ERROR,
            },
            404,
        )

    if not has_valid_api_key(request):
        return HttpResponseJSON(
            {
                "status": "error",
                "desc": "requires a valid API-key",
                "code": errors.BASKET_AUTH_ERROR,
            },
            401,
        )

    try:
        data = json.loads(request.body)
    except ValueError:
        statsd.incr(f"amo_sync.{post_type}.message.json_error")
        # Attach the raw body to the Sentry event for debugging.
        with sentry_sdk.configure_scope() as scope:
            scope.set_extra("request.body", request.body)
            sentry_sdk.capture_exception()
        return HttpResponseJSON(
            {
                "status": "error",
                "desc": "JSON error",
                "code": errors.BASKET_USAGE_ERROR,
            },
            400,
        )

    task.delay(data)
    return HttpResponseJSON({"status": "ok"})
def unindex_task(cls, id_list, **kw):
    """Unindex documents specified by cls and ids.

    ``cls`` is a mapping-type class providing get_mapping_type_name and
    unindex; ``id_list`` is the primary keys to remove from the index.
    On any exception the whole task retries with an escalating countdown
    from RETRY_TIMES, up to MAX_RETRIES.
    """
    statsd.incr("search.tasks.unindex_task.%s" % cls.get_mapping_type_name())
    try:
        # Pin to master db to avoid replication lag issues and stale
        # data.
        pin_this_thread()
        for id_ in id_list:
            cls.unindex(id_)
    except Exception as exc:
        retries = unindex_task.request.retries
        if retries >= MAX_RETRIES:
            # Some exceptions aren't pickleable and we need this to
            # throw things that are pickleable.
            raise IndexingTaskError()
        # Count total retries and retries at this backoff level.
        statsd.incr("search.tasks.unindex_task.retry", 1)
        statsd.incr("search.tasks.unindex_task.retry%d" % RETRY_TIMES[retries],
                    1)
        unindex_task.retry(exc=exc, max_retries=MAX_RETRIES,
                           countdown=RETRY_TIMES[retries])
    finally:
        unpin_this_thread()
def add_tweet(t):
    """Persist a tweepy status `t` as a twitter NowPost, skipping
    duplicates (deduped by service_id). Best-effort: failures are
    printed and counted, never raised."""
    from ..main.models import NowPost
    if NowPost.objects.filter(service='twitter',
                              service_id=t.id_str).exists():
        print("existing twitter post")
        return
    try:
        created_iso = t.created_at.isoformat()
        np = NowPost.objects.create_twitter(
            screen_name=t.user.screen_name,
            service_id=t.id_str,
            text=t.text,
            created=created_iso,
            original_json=t._json,
        )
        print(created_iso)
        print("new twitter post added")
        print(np.id)
        process_extended_attributes(t, np)
        statsd.incr('tweets.add.success')
    except Exception as e:
        print("failed with exception: " + str(e))
        statsd.incr('tweets.add.failed')
def trans_start_url(request):
    """
    JSON handler to get the Bango payment URL to start a transaction.
    """
    trans_id = request.session['trans_id']
    try:
        statsd.incr('purchase.payment_time.retry')
        with statsd.timer('purchase.payment_time.get_transaction'):
            trans = solitude.get_transaction(trans_id)
    except ObjectDoesNotExist:
        log.error('trans_start_url() transaction does not exist: '
                  '{t}'.format(t=trans_id))
        trans = {'status': None}

    data = {'url': None, 'status': trans['status']}
    if trans['status'] == constants.STATUS_PENDING:
        statsd.incr('purchase.payment_time.success')
        payment_start = request.session.get('payment_start', False)
        if payment_start:
            # Elapsed milliseconds since the payment flow started.
            delta = int((time.time() - float(payment_start)) * 1000)
            statsd.timing('purchase.payment_time.duration', delta)
        data['url'] = get_payment_url(trans)
    return data
def reply(request, forum_slug, thread_id):
    """Reply to a thread.

    Handles posting permission, preview mode, daily rate limiting, the
    watch-after-reply setting and watcher notifications, then redirects
    to the thread's last post. Invalid/preview/ratelimited requests
    fall through to re-rendering the posts page.
    """
    forum = get_object_or_404(Forum, slug=forum_slug)
    user = request.user
    if not forum.allows_posting_by(user):
        # Users who may view but not post get 403; others get 404 so
        # restricted forums stay hidden.
        if forum.allows_viewing_by(user):
            raise PermissionDenied
        else:
            raise Http404

    form = ReplyForm(request.POST)
    post_preview = None
    if form.is_valid():
        thread = get_object_or_404(Thread, pk=thread_id, forum=forum)

        if not thread.is_locked:
            reply_ = form.save(commit=False)
            reply_.thread = thread
            reply_.author = request.user
            if 'preview' in request.POST:
                # Preview only: nothing is saved; attach the author's
                # post count for display.
                post_preview = reply_
                post_preview.author_post_count = \
                    reply_.author.post_set.count()
            elif not is_ratelimited(request, 'forum-post', '15/d'):
                reply_.save()
                statsd.incr('forums.reply')

                # Subscribe the user to the thread.
                if Setting.get_for_user(request.user,
                                        'forums_watch_after_reply'):
                    NewPostEvent.notify(request.user, thread)

                # Send notifications to thread/forum watchers.
                NewPostEvent(reply_).fire(exclude=reply_.author)

                return HttpResponseRedirect(thread.get_last_post_url())

    return posts(request, forum_slug, thread_id, form, post_preview,
                 is_reply=True)
def edit_ng_report(request, display_name='', year=None, month=None,
                   day=None, id=None):
    """Create (no ``id``) or edit (with ``id``) an NGReport.

    New reports are pre-filled with the user's profile location;
    reports for unlisted activities cannot be edited.
    """
    user = request.user
    created = False
    initial = {}

    if not id:
        # Creating: seed the form's location fields from the profile.
        report = NGReport()
        created = True
        initial = {'location': '%s, %s, %s' % (user.userprofile.city,
                                               user.userprofile.region,
                                               user.userprofile.country),
                   'latitude': user.userprofile.lat,
                   'longitude': user.userprofile.lon}
    else:
        report = get_object_or_404(NGReport, pk=id,
                                   user__userprofile__display_name=display_name)

    # Unlisted activities are system-generated and not user-editable.
    if not created and report.activity.name in UNLISTED_ACTIVITIES:
        messages.warning(request, 'You cannot edit this report.')
        return redirect(report.get_absolute_url())

    report_form = forms.NGReportForm(request.POST or None,
                                     instance=report, initial=initial)
    if report_form.is_valid():
        if created:
            report.user = user
            messages.success(request, 'Report successfully created.')
            statsd.incr('reports.create_report')
        else:
            messages.success(request, 'Report successfully updated.')
            statsd.incr('reports.edit_report')
        report_form.save()
        return redirect(report.get_absolute_url())

    return render(request, 'edit_ng_report.jinja',
                  {'report_form': report_form,
                   'pageuser': user,
                   'report': report,
                   'created': created,
                   'campaign_trigger': ACTIVITY_CAMPAIGN})