def get_paykey(data):
    """
    Gets a paykey from Paypal. Need to pass in the following in data:
    pattern: the reverse pattern to resolve
    email: who the money is going to (required)
    amount: the amount of money (required)
    ip: ip address of end user (required)
    uuid: contribution_uuid (required)
    memo: any nice message
    qs: anything you want to append to the complete or cancel(optional)
    """
    # Build the complete/cancel return URLs from the caller's url pattern.
    complete = reverse(data['pattern'], args=[data['slug'], 'complete'])
    cancel = reverse(data['pattern'], args=[data['slug'], 'cancel'])
    qs = {'uuid': data['uuid']}
    if 'qs' in data:
        qs.update(data['qs'])
    uuid_qs = urllib.urlencode(qs)
    paypal_data = {
        'actionType': 'PAY',
        'currencyCode': 'USD',
        'cancelUrl': absolutify('%s?%s' % (cancel, uuid_qs)),
        'returnUrl': absolutify('%s?%s' % (complete, uuid_qs)),
        'trackingId': data['uuid'],
        'ipnNotificationUrl': absolutify(reverse('amo.paypal'))}
    receivers = (data.get('chains', ()), data['email'], data['amount'],
                 data['uuid'])
    if data.get('preapproval'):
        # The paypal_key might be empty if they have removed it.
        key = data['preapproval'].paypal_key
        if key:
            paypal_log.info('Using preapproval: %s' % data['preapproval'].pk)
            paypal_data['preapprovalKey'] = key
            paypal_data.update(add_receivers(*receivers, preapproval=True))
        # NOTE(review): when a preapproval exists but its paypal_key is
        # empty, no receivers are added here -- TODO confirm intended.
    else:
        paypal_data.update(add_receivers(*receivers))
    if data.get('memo'):
        paypal_data['memo'] = data['memo']
    try:
        with statsd.timer('paypal.paykey.retrieval'):
            response = _call(settings.PAYPAL_PAY_URL + 'Pay', paypal_data,
                             ip=data['ip'])
    except PreApprovalError, e:
        # Let's retry just once without preapproval.
        paypal_log.error('Failed using preapproval, reason: %s' % e)
        # Now it's not a pre-approval, make sure we get the
        # DIGITALGOODS setting back in there.
        del paypal_data['preapprovalKey']
        paypal_data.update(add_receivers(*receivers))
        # If this fails, we won't try again, just fail.
        with statsd.timer('paypal.paykey.retrieval'):
            response = _call(settings.PAYPAL_PAY_URL + 'Pay', paypal_data,
                             ip=data['ip'])
    # NOTE(review): `response` is never returned here -- this looks
    # truncated; callers presumably need the payKey. TODO confirm.
def refund_permission_url(addon, dest='payments'): """ Send permissions request to PayPal for refund privileges on this addon's paypal account. Returns URL on PayPal site to visit. """ # This is set in settings_test so we don't start calling PayPal # by accident. Explicitly set this in your tests. if not settings.PAYPAL_PERMISSIONS_URL: return '' paypal_log.debug('Getting refund permission URL for addon: %s' % addon.pk) with statsd.timer('paypal.permissions.url'): url = urlparams(reverse('devhub.addons.acquire_refund_permission', args=[addon.slug]), dest=dest) try: r = _call(settings.PAYPAL_PERMISSIONS_URL + 'RequestPermissions', { 'scope': 'REFUND', 'callback': absolutify(url) }) except PaypalError, e: paypal_log.debug('Error on refund permission URL addon: %s, %s' % (addon.pk, e)) if 'malformed' in str(e): # PayPal is very picky about where they redirect users to. # If you try and create a PayPal permissions URL on a # zamboni that has a non-standard port number or a # non-standard TLD, it will blow up with an error. We need # to be able to at least visit these pages and alter them # in dev, so this will give you a broken token that doesn't # work, but at least the page will function. r = {'token': 'wont-work-paypal-doesnt-like-your-domain'} else: raise
def refund(paykey):
    """
    Refund a payment.

    Arguments: transaction id of payment to refund

    Returns: A list of dicts containing the refund info for each
    receiver of the original payment.

    Raises PaypalError if the refund call fails or any receiver's
    refund status is not REFUNDED/REFUNDED_PENDING.
    """
    OK_STATUSES = ['REFUNDED', 'REFUNDED_PENDING']
    # Response keys look like "refundInfoList.refundInfo(0).refundStatus".
    # Fix: raw string with escaped '.'/'(' so the dots match literally.
    refund_info = re.compile(r'refundInfoList\.refundInfo\((\d+)\)\.(.*)')
    with statsd.timer('paypal.payment.refund'):
        try:
            response = _call(settings.PAYPAL_PAY_URL + 'Refund',
                             {'payKey': paykey})
        except PaypalError:
            paypal_log.error('Refund error', exc_info=True)
            raise
        # Group the flat response keys into one dict per receiver index.
        responses = []
        for k in response:
            g = refund_info.match(k)
            if g:
                i = int(g.group(1))
                subkey = g.group(2)
                while i >= len(responses):
                    responses.append({})
                responses[i][subkey] = response[k]
        for d in responses:
            if d['refundStatus'] not in OK_STATUSES:
                raise PaypalError('Bad refund status for %s: %s'
                                  % (d['receiver.email'], d['refundStatus']))
            paypal_log.debug('Refund successful for: %s, %s, %s'
                             % (paykey, d['receiver.email'],
                                d['refundStatus']))
        return responses
def application(environ, start_response):
    """WSGI endpoint that verifies a marketplace receipt.

    Responds 200 with the verification output, or 500 with an empty
    body when the URL has no addon id or verification blows up.
    """
    status = '200 OK'
    with statsd.timer('services.verify'):
        data = environ['wsgi.input'].read()
        try:
            addon_id = id_re.search(environ['PATH_INFO']).group('addon_id')
        except AttributeError:
            # id_re.search returned None: no addon id in the URL.
            output = ''
            log_info({'receipt': '%s...' % data[:10], 'addon': 'empty'},
                     'Wrong url %s' % environ['PATH_INFO'][:20])
            start_response('500 Internal Server Error', [])
            return [output]
        try:
            verify = Verify(addon_id, data, environ)
            output = verify()
            start_response(status, verify.get_headers(len(output)))
            receipt_cef.log(environ, addon_id, 'verify',
                            'Receipt verification')
        except:
            # Deliberate catch-all: this endpoint must always answer.
            output = ''
            log_exception({'receipt': '%s...' % data[:10],
                           'addon': addon_id})
            receipt_cef.log(environ, addon_id, 'verify',
                            'Receipt verification error')
            start_response('500 Internal Server Error', [])
        return [output]
def get_queuelen(self, queue_name):
    """Return the number of submissions currently waiting in a queue.

    :param str queue_name: an existing XQueue queue name
    :raises: :class:`BadQueueName` if the supplied ``queue_name`` is
             invalid
    """
    log.debug('Fetching queue length for "{}"'.format(queue_name))
    endpoint = urlparse.urljoin(self.url, "/xqueue/get_queuelen/")
    with statsd.timer("bux_grader_framework.xqueue.get_queuelen"):
        success, content = self._request(endpoint, "get",
                                         params={"queue_name": queue_name})
    if not success:
        error_msg = "Could not get queue length: {}".format(content)
        # XQueue answers with a queue listing when the name is unknown.
        if content.startswith("Valid queue names are"):
            raise BadQueueName(error_msg)
        raise XQueueException(error_msg)
    queuelen = int(content)
    log.debug('Retrieved queue length for "{}": {}'.format(queue_name,
                                                           queuelen))
    return queuelen
def authenticate(self, request=None, assertion=None):
    """Authentication based on BrowserID assertion.

    ``django.contrib.auth`` backend that is SASL and BrowserID savy.
    Uses session to maintain assertion over multiple requests.
    """
    if not (request and assertion):
        return None
    store_assertion(request, assertion)
    directory = UserSession(request)
    with statsd.timer('larper.sasl_bind_time'):
        registered, details = _get_registered_user(directory, request)
    if not registered:
        return None
    # Mirror the directory person into a local Django user record.
    person = directory.get_by_unique_id(details)
    user_defaults = dict(username=person.username,
                         first_name=person.first_name,
                         last_name=person.last_name,
                         email=person.username)
    user, created = User.objects.get_or_create(username=person.username,
                                               defaults=user_defaults)
    if created:
        # Credentials live in the directory, not in Django.
        user.set_unusable_password()
        user.save()
    return user
def refund_permission_url(addon): """ Send permissions request to PayPal for refund privileges on this addon's paypal account. Returns URL on PayPal site to visit. """ # This is set in settings_test so we don't start calling PayPal # by accident. Explicitly set this in your tests. if not settings.PAYPAL_PERMISSIONS_URL: return '' paypal_log.debug('Getting refund permission URL for addon: %s' % addon.pk) with statsd.timer('paypal.permissions.url'): url = reverse('devhub.addons.acquire_refund_permission', args=[addon.slug]) try: r = _call(settings.PAYPAL_PERMISSIONS_URL + 'RequestPermissions', {'scope': 'REFUND', 'callback': absolutify(url)}) except PaypalError, e: paypal_log.debug('Error on refund permission URL addon: %s, %s' % (addon.pk, e)) if 'malformed' in str(e): # PayPal is very picky about where they redirect users to. # If you try and create a PayPal permissions URL on a # zamboni that has a non-standard port number or a # non-standard TLD, it will blow up with an error. We need # to be able to at least visit these pages and alter them # in dev, so this will give you a broken token that doesn't # work, but at least the page will function. r = {'token': 'wont-work-paypal-doesnt-like-your-domain'} else: raise
def wiki_to_html(wiki_markup, locale=settings.WIKI_DEFAULT_LANGUAGE,
                 doc_id=None):
    """Render wiki markup to HTML with the wiki app's enhanced parser."""
    with statsd.timer('wiki.render'):
        parser = WikiParser(doc_id=doc_id)
        return parser.parse(wiki_markup, show_toc=False, locale=locale)
def monitor(request, format=None):
    """Run every system health check and report pass/fail per check."""
    status_summary = {}  # check name -> boolean pass/fail for the template
    results = {}         # per-check details and timings
    for check in ['memcache', 'libraries', 'elastic', 'path', 'redis',
                  'hera']:
        with statsd.timer('monitor.%s' % check) as timer:
            status, result = getattr(monitors, check)()
        status_summary[check] = status
        results['%s_results' % check] = result
        results['%s_timer' % check] = timer.ms
    # If anything broke, send HTTP 500.
    status_code = 200 if all(status_summary.values()) else 500
    if format == '.json':
        return http.HttpResponse(json.dumps(status_summary),
                                 status=status_code)
    ctx = dict(results)
    ctx['status_summary'] = status_summary
    return jingo.render(request, 'services/monitor.html', ctx,
                        status=status_code)
def usernames(request):
    """An API to provide auto-complete data for user names."""
    term = request.GET.get('term', '')
    query = request.GET.get('query', '')
    prefix = term or query
    # No prefix or anonymous caller: nothing to suggest.
    if not prefix or not request.user.is_authenticated():
        return []
    with statsd.timer('users.api.usernames.search'):
        profiles = (Profile.objects
                    .filter(Q(name__istartswith=prefix))
                    .values_list('user_id', flat=True))
        users = (User.objects
                 .filter(Q(username__istartswith=prefix) |
                         Q(id__in=profiles))
                 .extra(select={'length': 'Length(username)'})
                 .order_by('length')
                 .select_related('profile'))
        if not waffle.switch_is_active('users-dont-limit-by-login'):
            # Only suggest users seen in the last 12 weeks.
            cutoff = datetime.now() - timedelta(weeks=12)
            users = users.filter(last_login__gte=cutoff)
        return [{'username': u.username,
                 'display_name': display_name_or_none(u)}
                for u in users[:10]]
def usernames(request):
    """An API to provide auto-complete data for user names."""
    pre = request.GET.get('term', '') or request.GET.get('query', '')
    if not pre:
        return []
    if not request.user.is_authenticated():
        # Only logged-in users get suggestions.
        return []
    with statsd.timer('users.api.usernames.search'):
        matching_profiles = Profile.objects.filter(
            Q(name__istartswith=pre)).values_list('user_id', flat=True)
        candidates = User.objects.filter(
            Q(username__istartswith=pre) | Q(id__in=matching_profiles))
        # Shortest usernames first, profile prefetched for display names.
        candidates = (candidates
                      .extra(select={'length': 'Length(username)'})
                      .order_by('length')
                      .select_related('profile'))
        if not waffle.switch_is_active('users-dont-limit-by-login'):
            candidates = candidates.filter(
                last_login__gte=datetime.now() - timedelta(weeks=12))
        suggestions = []
        for u in candidates[:10]:
            suggestions.append({'username': u.username,
                                'display_name': display_name_or_none(u)})
        return suggestions
def collect_tweets():
    """Collect new tweets about Firefox."""
    with statsd.timer('customercare.tweets.time_elapsed'):
        # OAuth into the Twitter search API (raw JSON responses).
        auth = tweepy.OAuthHandler(settings.TWITTER_CONSUMER_KEY,
                                   settings.TWITTER_CONSUMER_SECRET,
                                   secure=True)
        auth.set_access_token(settings.TWITTER_ACCESS_TOKEN,
                              settings.TWITTER_ACCESS_TOKEN_SECRET)
        api = tweepy.API(auth, parser=RawParser())
        search_options = {
            'q': 'firefox OR #fxinput OR @firefoxbrasil OR #firefoxos',
            'rpp': settings.CC_TWEETS_PERPAGE,  # Items per page.
            'result_type': 'recent',  # Retrieve tweets by date.
        }
        # If we already have some tweets, collect nothing older than what we
        # have.
        try:
            latest_tweet = Tweet.latest()
        except Tweet.DoesNotExist:
            log.debug('No existing tweets. Retrieving %d tweets from search.'
                      % (settings.CC_TWEETS_PERPAGE))
        else:
            search_options['since_id'] = latest_tweet.tweet_id
            log.info('Retrieving tweets with id >= %s'
                     % latest_tweet.tweet_id)
        # Retrieve Tweets
        try:
            raw_data = json.loads(str(api.search(**search_options)))
        except tweepy.TweepError, e:
            # Best-effort task: log and try again on the next run.
            log.warning('Twitter request failed: %s' % e)
            return
        if not ('results' in raw_data and raw_data['results']):
            # Twitter returned 0 results.
            return
        # Drop tweets into DB
        for item in raw_data['results']:
            # Apply filters to tweet before saving
            # Allow links in #fxinput tweets
            statsd.incr('customercare.tweet.collected')
            item = _filter_tweet(item,
                                 allow_links='#fxinput' in item['text'])
            if not item:
                continue
            created_date = datetime.utcfromtimestamp(calendar.timegm(
                rfc822.parsedate(item['created_at'])))
            item_lang = item.get('iso_language_code', 'en')
            tweet = Tweet(tweet_id=item['id'], raw_json=json.dumps(item),
                          locale=item_lang, created=created_date)
            try:
                tweet.save()
                statsd.incr('customercare.tweet.saved')
            except IntegrityError:
                # Duplicate tweet_id: already stored, skip it.
                pass
def application(environ, start_response):
    """WSGI endpoint that verifies a marketplace receipt.

    Responds 200 with the verification output, or 500 with an empty
    body when the URL has no addon id or verification blows up.
    """
    status = "200 OK"
    with statsd.timer("services.verify"):
        data = environ["wsgi.input"].read()
        try:
            addon_id = id_re.search(environ["PATH_INFO"]).group("addon_id")
        except AttributeError:
            # id_re.search returned None: no addon id in the URL.
            output = ""
            log_info({"receipt": "%s..." % data[:10], "addon": "empty"},
                     "Wrong url %s" % environ["PATH_INFO"][:20])
            start_response("500 Internal Server Error", [])
            return [output]
        try:
            verify = Verify(addon_id, data, environ)
            output = verify()
            start_response(status, verify.get_headers(len(output)))
            receipt_cef.log(environ, addon_id, "verify",
                            "Receipt verification")
        except:
            # Deliberate catch-all: this endpoint must always answer.
            output = ""
            log_exception({"receipt": "%s..." % data[:10],
                           "addon": addon_id})
            receipt_cef.log(environ, addon_id, "verify",
                            "Receipt verification error")
            start_response("500 Internal Server Error", [])
        return [output]
def process_response(self, request, response):
    """Rewrite successful HTML responses for PJAX (X-PJAX) requests.

    Strips the page down to its <title> plus the contents of the
    configured PJAX container so jquery-pjax can swap it in without a
    full page load. Non-PJAX responses pass through untouched.
    """
    if (request.META.get('HTTP_X_PJAX') and
        response.status_code == 200 and
        'html' in response.get('content-type', '').lower()):
        # TODO(Kumar) cache this.
        with statsd.timer('pjax.parse'):
            tree = lxml.html.document_fromstring(response.content)
            # HTML is encoded as ascii with entity refs for non-ascii.
            html = []
            found_pjax = False
            for elem in tree.cssselect('title,%s'
                                       % settings.PJAX_SELECTOR):
                if elem.tag == 'title':
                    # Inject a <title> for jquery-pjax
                    html.append(lxml.html.tostring(elem, encoding=None))
                else:
                    found_pjax = True
                    # Keep the container's text and serialized children,
                    # not the container element itself.
                    if elem.text:
                        html.append(elem.text.encode('ascii',
                                                     'xmlcharrefreplace'))
                    for ch in elem.iterchildren():
                        html.append(lxml.html.tostring(ch, encoding=None))
            if not found_pjax:
                msg = ('pjax response for %s does not contain selector %r'
                       % (request.path, settings.PJAX_SELECTOR))
                if settings.DEBUG:
                    # Tell the developer the template is bad.
                    raise ValueError(msg)
                else:
                    pjax_log.error(msg)
                    return response
            response.content = ''.join(html)
    return response
def monitor(request, format=None):
    """Run every system health check and report pass/fail per check."""
    # For each check, a boolean pass/fail status to show in the template
    status_summary = {}
    results = {}
    checks = ["memcache", "libraries", "elastic", "path", "redis", "hera"]
    for check in checks:
        with statsd.timer("monitor.%s" % check) as timer:
            status, result = getattr(monitors, check)()
        status_summary[check] = status
        results["%s_results" % check] = result
        results["%s_timer" % check] = timer.ms
    # If anything broke, send HTTP 500.
    status_code = 200 if all(status_summary.values()) else 500
    if format == ".json":
        return http.HttpResponse(json.dumps(status_summary),
                                 status=status_code)
    ctx = {}
    ctx.update(results)
    ctx["status_summary"] = status_summary
    return jingo.render(request, "services/monitor.html", ctx,
                        status=status_code)
def get_submission(self, queue_name):
    """ Pop a submission off of XQueue.

    :param str queue_name: an existing XQueue queue name
    :raises: :class:`BadQueueName` if the supplied ``queue_name``
             is invalid

    Returns a submission :class:`dict` or :class:`None`.
    """
    log.debug('Fetching submission from "{}"'.format(queue_name))
    url = urlparse.urljoin(self.url, "/xqueue/get_submission/")
    params = {"queue_name": queue_name}
    with statsd.timer("bux_grader_framework.xqueue.get_submission"):
        success, content = self._request(url, "get", params=params)
    if not success:
        error_msg = "Could not get submission: {}".format(content)
        # XQueue distinguishes "unknown queue" from "empty queue" only
        # by the message text it returns.
        if self.QUEUE_NOT_FOUND_MSG % queue_name == content:
            raise BadQueueName(error_msg)
        elif self.EMPTY_QUEUE_MSG % queue_name == content:
            # Nothing waiting -- not an error.
            return None
        else:
            raise XQueueException(error_msg)
    # Convert response string to dicts
    submission = json.loads(content)
    header, body, files = self._parse_xrequest(submission)
    log.debug('Retrieved submission from "{}": {}'.format(queue_name,
                                                          submission))
    return {"xqueue_header": header, "xqueue_body": body,
            "xqueue_files": files}
def authenticate(self, request=None, assertion=None):
    """Authentication based on BrowserID assertion.

    ``django.contrib.auth`` backend that is SASL and BrowserID savy.
    Uses session to maintain assertion over multiple requests.
    """
    if not (request and assertion):
        return None
    store_assertion(request, assertion)
    directory = UserSession(request)
    with statsd.timer('larper.sasl_bind_time'):
        (registered, details) = _get_registered_user(directory, request)
    if registered:
        # Mirror the directory person into a local Django user record.
        person = directory.get_by_unique_id(details)
        defaults = dict(username=person.username,
                        first_name=person.first_name,
                        last_name=person.last_name,
                        email=person.username)
        user, created = User.objects.get_or_create(
            username=person.username, defaults=defaults)
        if created:
            # Credentials live in the directory, not in Django.
            user.set_unusable_password()
            user.save()
        return user
    return None
def execute_query(self, db, stmt):
    """ Execute the SQL query

    :param db: a MySQLdb connection object
    :param string stmt: the SQL query to run
    :raises InvalidQuery: if the query could not be executed
    :return: a ``(cols, rows)`` tuple of column headings and result rows
    """
    timer = statsd.timer('bux_sql_grader.execute_query').start()
    cursor = db.cursor()
    try:
        cursor.execute(stmt)
        rows = cursor.fetchall()
        cols = ()
        if cursor.description:
            # Cursor descriptions are not returned as unicode by
            # MySQLdb so we convert them to support unicode chars in
            # column headings.
            cols = tuple(unicode(col[0], 'utf-8')
                         for col in cursor.description)
    except (OperationalError, Warning, Error) as e:
        msg = e.args[1]
        code = e.args[0]
        raise InvalidQuery("MySQL Error {}: {}".format(code, msg))
    finally:
        # Bug fix: the original only closed the cursor on the success
        # path, leaking it whenever the query raised. Close it -- and
        # stop the timer -- no matter how we leave the try block.
        cursor.close()
        timer.stop()
    return cols, rows
def upload_results(self, results, path, message=None):
    """ Upload query results CSV to Amazon S3

    :param tuple results: query results for upload
    :param str path: bucket path
    :param str message: text to display before download link

    :return: link text on successful upload, failure message if
             s3 upload failed
    """
    timer = statsd.timer('bux_sql_grader.upload_results').start()
    try:
        if not message:
            message = "Download full results"
        # Convert result rows to CSV
        csv_results = self.csv_results(results)
        # Upload to S3
        s3_url = self.upload_to_s3(csv_results, path)
        if s3_url:
            context = {"url": xml_escape(s3_url),
                       "message": xml_escape(message),
                       "icon_src": xml_escape(self.download_icon)}
            download_link = DOWNLOAD_LINK.substitute(context)
        else:
            download_link = UPLOAD_FAILED_MESSAGE
    finally:
        # Bug fix: stop the timer even when CSV generation or the S3
        # upload raises (the original leaked a running timer then).
        timer.stop()
    return download_link
def application(environ, start_response):
    """WSGI endpoint that verifies a marketplace receipt.

    Responds 200 with the verification output, or 500 with an empty
    body when the URL has no addon id or verification blows up.
    """
    status = '200 OK'
    with statsd.timer('services.verify'):
        data = environ['wsgi.input'].read()
        try:
            addon_id = id_re.search(environ['PATH_INFO']).group('addon_id')
        except AttributeError:
            # id_re.search returned None: no addon id in the URL.
            output = ''
            log_info({'receipt': '%s...' % data[:10], 'addon': 'empty'},
                     'Wrong url %s' % environ['PATH_INFO'][:20])
            start_response('500 Internal Server Error', [])
            return [output]
        try:
            verify = Verify(addon_id, data, environ)
            output = verify()
            start_response(status, verify.get_headers(len(output)))
            receipt_cef.log(environ, addon_id, 'verify',
                            'Receipt verification')
        except:
            # Deliberate catch-all: this endpoint must always answer.
            output = ''
            log_exception({'receipt': '%s...' % data[:10],
                           'addon': addon_id})
            receipt_cef.log(environ, addon_id, 'verify',
                            'Receipt verification error')
            start_response('500 Internal Server Error', [])
        return [output]
def packager(data, feature_set, **kw):
    """Build an add-on based on input data."""
    log.info("[1@None] Packaging add-on")
    from devhub.views import packager_path
    dest = packager_path(data["slug"])
    # Lock on the destination path so only one packaging run happens
    # per package at a time.
    with guard(u"devhub.packager.%s" % dest) as locked:
        if locked:
            log.error(u"Packaging in progress: %s" % dest)
            return
        with statsd.timer("devhub.packager"):
            # Local import: the packager() builder shares this task's name.
            from packager.main import packager
            log.info("Starting packaging: %s" % dest)
            # Only the features flagged truthy by the submitter.
            features = set([k for k, v in feature_set.items() if v])
            try:
                packager(data, dest, features)
            except Exception, err:
                log.error(u"Failed to package add-on: %s" % err)
                raise
            if os.path.exists(dest):
                log.info(u"Package saved: %s" % dest)
def collect_tweets():
    """Collect new tweets about Firefox."""
    # Bug fix: this docstring previously sat AFTER the early return
    # below, making it a stray (dead) expression instead of the
    # function's docstring.
    # Don't (ab)use the twitter API from dev and stage.
    if settings.STAGE:
        return
    with statsd.timer('customercare.tweets.time_elapsed'):
        t = Twython(settings.TWITTER_CONSUMER_KEY,
                    settings.TWITTER_CONSUMER_SECRET,
                    settings.TWITTER_ACCESS_TOKEN,
                    settings.TWITTER_ACCESS_TOKEN_SECRET)
        search_options = {
            'q': ('firefox OR #fxinput OR @firefoxbrasil OR #firefoxos '
                  'OR @firefox_es'),
            'count': settings.CC_TWEETS_PERPAGE,  # Items per page.
            'result_type': 'recent',  # Retrieve tweets by date.
        }
        # If we already have some tweets, collect nothing older than what we
        # have.
        try:
            latest_tweet = Tweet.latest()
        except Tweet.DoesNotExist:
            log.debug('No existing tweets. Retrieving %d tweets from search.'
                      % settings.CC_TWEETS_PERPAGE)
        else:
            search_options['since_id'] = latest_tweet.tweet_id
            log.info('Retrieving tweets with id >= %s'
                     % latest_tweet.tweet_id)
        # Retrieve Tweets
        results = t.search(**search_options)
        if len(results['statuses']) == 0:
            # Twitter returned 0 results.
            return
        # Drop tweets into DB
        for item in results['statuses']:
            # Apply filters to tweet before saving
            # Allow links in #fxinput tweets
            statsd.incr('customercare.tweet.collected')
            item = _filter_tweet(item,
                                 allow_links='#fxinput' in item['text'])
            if not item:
                continue
            created_date = datetime.utcfromtimestamp(calendar.timegm(
                rfc822.parsedate(item['created_at'])))
            item_lang = item['metadata'].get('iso_language_code', 'en')
            tweet = Tweet(tweet_id=item['id'], raw_json=json.dumps(item),
                          locale=item_lang, created=created_date)
            try:
                tweet.save()
                statsd.incr('customercare.tweet.saved')
            except IntegrityError:
                # Duplicate tweet_id: already stored, skip it.
                pass
def _paypal(request):
    """Handle a PayPal IPN postback.

    Echoes the IPN back to PayPal for validation, then dispatches each
    transaction in it to the refunded/completed/reversal handler.
    """
    # raw_post_data has to be accessed before request.POST. wtf django?
    raw, post = request.raw_post_data, request.POST.copy()
    paypal_log.info('IPN received: %s' % raw)
    # Check that the request is valid and coming from PayPal.
    # The order of the params has to match the original request.
    data = u'cmd=_notify-validate&' + raw
    with statsd.timer('paypal.validate-ipn'):
        paypal_response = urllib2.urlopen(settings.PAYPAL_CGI_URL,
                                          data, 20).readline()
    post, transactions = _parse(post)
    # If paypal doesn't like us, fail.
    if paypal_response != 'VERIFIED':
        msg = ("Expecting 'VERIFIED' from PayPal, got '%s'. "
               "Failing." % paypal_response)
        _log_error_with_data(msg, post)
        return http.HttpResponseForbidden('Invalid confirmation')
    # Cope with subscription events.
    if post.get('txn_type', '').startswith('subscr_'):
        SubscriptionEvent.objects.create(post_data=php.serialize(post))
        paypal_log.info('Subscription created: %s'
                        % post.get('txn_id', ''))
        return http.HttpResponse('Success!')
    payment_status = post.get('payment_status', '').lower()
    if payment_status != 'completed':
        return paypal_ignore(request, post)
    # There could be multiple transactions on the IPN. This will deal
    # with them appropriately or cope if we don't know how to deal with
    # any of them.
    methods = {'refunded': paypal_refunded,
               'completed': paypal_completed,
               'reversal': paypal_reversal}
    result = None
    called = False
    for key, value in transactions.items():
        status = value.get('status', '').lower()
        if status not in methods:
            paypal_log.info('Unknown status: %s' % status)
            continue
        result = methods[status](request, post, value)
        called = True
    if not called:
        # Whilst the payment status was completed, it contained
        # no transactions with status, which means we don't know
        # how to process it. Hence it's being ignored.
        return paypal_ignore(request, post)
    if not result:
        return _log_unmatched(post)
    return result
def decode_receipt(receipt):
    """
    Cracks the receipt using the private key. This will probably change
    to using the cert at some point, especially when we get the HSM.
    """
    with statsd.timer('services.decode'):
        private_key = jwt.rsa_load(settings.WEBAPPS_RECEIPT_KEY)
        return jwt.decode(receipt, private_key)
def get_permissions_token(request_token, verification_code):
    """
    Send request for permissions token, after user has granted the
    requested permissions via the PayPal page we redirected them to.
    """
    with statsd.timer('paypal.permissions.token'):
        response = _call(settings.PAYPAL_PERMISSIONS_URL + 'GetAccessToken',
                         {'token': request_token,
                          'verifier': verification_code})
    return response['token']
def run_validator(file_path, for_appversions=None, test_all_tiers=False,
                  overrides=None):
    """A pre-configured wrapper around the addon validator.

    *file_path*
        Path to addon / extension file to validate.

    *for_appversions=None*
        An optional dict of application versions to validate this addon
        for. The key is an application GUID and its value is a list of
        versions.

    *test_all_tiers=False*
        When False (default) the validator will not continue if it
        encounters fatal errors. When True, all tests in all tiers are
        run. See bug 615426 for discussion on this default.

    *overrides=None*
        Normally the validator gets info from install.rdf but there are
        a few things we need to override. See validator for supported
        overrides. Example: {'targetapp_maxVersion': {'<app guid>':
        '<version>'}}

    To validate the addon for compatibility with Firefox 5 and 6,
    you'd pass in::

        for_appversions={amo.FIREFOX.guid: ['5.0.*', '6.0.*']}

    Not all application versions will have a set of registered
    compatibility tests.
    """
    from validator.validate import validate

    # TODO(Kumar) remove this when validator is fixed, see bug 620503
    from validator.testcases import scripting
    scripting.SPIDERMONKEY_INSTALLATION = settings.SPIDERMONKEY
    import validator.constants
    validator.constants.SPIDERMONKEY_INSTALLATION = settings.SPIDERMONKEY

    # Regenerate the app-compatibility JSON if it's missing.
    apps = dump_apps.Command.JSON_PATH
    if not os.path.exists(apps):
        call_command("dump_apps")

    with statsd.timer("devhub.validator"):
        return validate(file_path,
                        for_appversions=for_appversions,
                        format="json",
                        # When False, this flag says to stop testing after
                        # one tier fails.
                        determined=test_all_tiers,
                        approved_applications=apps,
                        spidermonkey=settings.SPIDERMONKEY,
                        overrides=overrides,
                        timeout=settings.VALIDATOR_TIMEOUT)
def _paypal(request):
    """Handle a PayPal IPN postback.

    Echoes the IPN back to PayPal for validation, then dispatches each
    transaction in it to the refunded/completed/reversal handler.
    """
    # raw_post_data has to be accessed before request.POST. wtf django?
    raw, post = request.raw_post_data, request.POST.copy()
    paypal_log.info('IPN received: %s' % raw)
    # Check that the request is valid and coming from PayPal.
    # The order of the params has to match the original request.
    data = u'cmd=_notify-validate&' + raw
    with statsd.timer('paypal.validate-ipn'):
        paypal_response = urllib2.urlopen(settings.PAYPAL_CGI_URL,
                                          data, 20).readline()
    post, transactions = _parse(post)
    # If paypal doesn't like us, fail.
    if paypal_response != 'VERIFIED':
        msg = ("Expecting 'VERIFIED' from PayPal, got '%s'. "
               "Failing." % paypal_response)
        _log_error_with_data(msg, post)
        return http.HttpResponseForbidden('Invalid confirmation')
    # Cope with subscription events.
    if post.get('txn_type', '').startswith('subscr_'):
        SubscriptionEvent.objects.create(post_data=php.serialize(post))
        paypal_log.info('Subscription created: %s'
                        % post.get('txn_id', ''))
        return http.HttpResponse('Success!')
    payment_status = post.get('payment_status', '').lower()
    if payment_status != 'completed':
        return paypal_ignore(request, post)
    # There could be multiple transactions on the IPN. This will deal
    # with them appropriately or cope if we don't know how to deal with
    # any of them.
    methods = {'refunded': paypal_refunded,
               'completed': paypal_completed,
               'reversal': paypal_reversal}
    result = None
    called = False
    for key, value in transactions.items():
        status = value.get('status', '').lower()
        if status not in methods:
            paypal_log.info('Unknown status: %s' % status)
            continue
        result = methods[status](request, post, value)
        called = True
    if not called:
        # Whilst the payment status was completed, it contained
        # no transactions with status, which means we don't know
        # how to process it. Hence it's being ignored.
        return paypal_ignore(request, post)
    if not result:
        return _log_unmatched(post)
    return result
def collect_tweets():
    """Collect new tweets about Firefox."""
    # Bug fix: this docstring previously sat AFTER the early return
    # below, making it a stray (dead) expression instead of the
    # function's docstring.
    # Don't (ab)use the twitter API from dev and stage.
    if settings.STAGE:
        return
    with statsd.timer("customercare.tweets.time_elapsed"):
        t = Twython(
            settings.TWITTER_CONSUMER_KEY,
            settings.TWITTER_CONSUMER_SECRET,
            settings.TWITTER_ACCESS_TOKEN,
            settings.TWITTER_ACCESS_TOKEN_SECRET,
        )
        search_options = {
            "q": ("firefox OR #fxinput OR @firefoxbrasil OR #firefoxos "
                  "OR @firefox_es"),
            "count": settings.CC_TWEETS_PERPAGE,  # Items per page.
            "result_type": "recent",  # Retrieve tweets by date.
        }
        # If we already have some tweets, collect nothing older than what we
        # have.
        try:
            latest_tweet = Tweet.latest()
        except Tweet.DoesNotExist:
            log.debug("No existing tweets. Retrieving %d tweets from search."
                      % settings.CC_TWEETS_PERPAGE)
        else:
            search_options["since_id"] = latest_tweet.tweet_id
            log.info("Retrieving tweets with id >= %s"
                     % latest_tweet.tweet_id)
        # Retrieve Tweets
        results = t.search(**search_options)
        if len(results["statuses"]) == 0:
            # Twitter returned 0 results.
            return
        # Drop tweets into DB
        for item in results["statuses"]:
            # Apply filters to tweet before saving
            # Allow links in #fxinput tweets
            statsd.incr("customercare.tweet.collected")
            item = _filter_tweet(item,
                                 allow_links="#fxinput" in item["text"])
            if not item:
                continue
            created_date = datetime.utcfromtimestamp(
                calendar.timegm(rfc822.parsedate(item["created_at"])))
            item_lang = item["metadata"].get("iso_language_code", "en")
            tweet = Tweet(tweet_id=item["id"], raw_json=json.dumps(item),
                          locale=item_lang, created=created_date)
            try:
                tweet.save()
                statsd.incr("customercare.tweet.saved")
            except IntegrityError:
                # Duplicate tweet_id: already stored, skip it.
                pass
def verify_receipt(request, addon):
    """Returns the status for that addon."""
    with statsd.timer('marketplace.verification'):
        #TODO(andym): not sure what to do about refunded yet.
        if addon.type != amo.ADDON_WEBAPP:
            return http.HttpResponse(status=400)
        # If wanted we can use the watermark hash, however it's assumed the
        # users will be logged into AMO.
        if addon.has_purchased(request.amo_user):
            return {'status': 'ok'}
        return {'status': 'invalid'}
def collect_tweets():
    """Collect new tweets about Firefox."""
    with statsd.timer('customercare.tweets.time_elapsed'):
        search_options = {
            'q': 'firefox OR #fxinput',
            'rpp': settings.CC_TWEETS_PERPAGE,  # Items per page.
            'result_type': 'recent',  # Retrieve tweets by date.
        }
        # If we already have some tweets, collect nothing older than what we
        # have.
        try:
            latest_tweet = Tweet.latest()
        except Tweet.DoesNotExist:
            log.debug('No existing tweets. Retrieving %d tweets from search.'
                      % (settings.CC_TWEETS_PERPAGE))
        else:
            search_options['since_id'] = latest_tweet.tweet_id
            log.info('Retrieving tweets with id >= %s'
                     % latest_tweet.tweet_id)
        # Retrieve Tweets
        try:
            raw_data = json.load(
                urllib.urlopen('%s?%s'
                               % (SEARCH_URL,
                                  urllib.urlencode(search_options))))
        except Exception, e:
            # Best-effort task: log and try again on the next run.
            log.warning('Twitter request failed: %s' % e)
            return
        if not ('results' in raw_data and raw_data['results']):
            # Twitter returned 0 results.
            return
        # Drop tweets into DB
        for item in raw_data['results']:
            # Apply filters to tweet before saving
            # Allow links in #fxinput tweets
            statsd.incr('customercare.tweet.collected')
            item = _filter_tweet(item,
                                 allow_links='#fxinput' in item['text'])
            if not item:
                continue
            created_date = datetime.utcfromtimestamp(
                calendar.timegm(rfc822.parsedate(item['created_at'])))
            item_lang = item.get('iso_language_code', 'en')
            tweet = Tweet(tweet_id=item['id'], raw_json=json.dumps(item),
                          locale=item_lang, created=created_date)
            try:
                tweet.save()
                statsd.incr('customercare.tweet.saved')
            except IntegrityError:
                # Duplicate tweet_id: already stored, skip it.
                pass
def run_validator(file_path, for_appversions=None, test_all_tiers=False,
                  overrides=None):
    """A pre-configured wrapper around the addon validator.

    *file_path*
        Path to addon / extension file to validate.

    *for_appversions=None*
        An optional dict of application versions to validate this addon
        for. The key is an application GUID and its value is a list of
        versions.

    *test_all_tiers=False*
        When False (default) the validator will not continue if it
        encounters fatal errors. When True, all tests in all tiers are
        run. See bug 615426 for discussion on this default.

    *overrides=None*
        Normally the validator gets info from install.rdf but there are
        a few things we need to override. See validator for supported
        overrides. Example: {'targetapp_maxVersion': {'<app guid>':
        '<version>'}}

    To validate the addon for compatibility with Firefox 5 and 6,
    you'd pass in::

        for_appversions={amo.FIREFOX.guid: ['5.0.*', '6.0.*']}

    Not all application versions will have a set of registered
    compatibility tests.
    """
    from validator.validate import validate

    # TODO(Kumar) remove this when validator is fixed, see bug 620503
    from validator.testcases import scripting
    scripting.SPIDERMONKEY_INSTALLATION = settings.SPIDERMONKEY
    import validator.constants
    validator.constants.SPIDERMONKEY_INSTALLATION = settings.SPIDERMONKEY

    # Regenerate the app-compatibility JSON if it's missing.
    apps = dump_apps.Command.JSON_PATH
    if not os.path.exists(apps):
        call_command('dump_apps')

    with statsd.timer('devhub.validator'):
        return validate(file_path,
                        for_appversions=for_appversions,
                        format='json',
                        # When False, this flag says to stop testing after
                        # one tier fails.
                        determined=test_all_tiers,
                        approved_applications=apps,
                        spidermonkey=settings.SPIDERMONKEY,
                        overrides=overrides,
                        timeout=settings.VALIDATOR_TIMEOUT)
def raw(self):
    """Run the built query against ElasticSearch and return raw hits."""
    query = self._build_query()
    client = elasticutils.get_es()
    try:
        with statsd.timer('search.es.timer') as es_timer:
            results = client.search(query, self.index,
                                    self.type._meta.db_table)
    except Exception:
        # Log the failing query so it can be replayed, then propagate.
        log.error(query)
        raise
    statsd.timing('search.es.took', results['took'])
    log.debug('[%s] [%s] %s' % (results['took'], es_timer.ms, query))
    return results
def application(environ, start_response):
    """WSGI entry point for the plugin-finder service."""
    with statsd.timer('services.pfs'):
        query = dict(parse_qsl(environ['QUERY_STRING']))
        try:
            body = get_output(query)
            start_response('200 OK', get_headers(len(body)))
        except:
            # Record the request that blew up, then let WSGI handle it.
            log_exception(query)
            raise
        return [body]
def wiki_to_html(wiki_markup, locale=settings.WIKI_DEFAULT_LANGUAGE,
                 doc_id=None, parser_cls=None):
    """Render wiki markup to HTML using the wiki app's enhanced parser."""
    parser_factory = WikiParser if parser_cls is None else parser_cls
    with statsd.timer('wiki.render'):
        # Parse under the requested locale so localized strings (like the
        # TOC heading) come out in the right language.
        with uselocale(locale):
            html = parser_factory(doc_id=doc_id).parse(
                wiki_markup, show_toc=False, locale=locale,
                toc_string=_('Table of Contents'))
    return html
def sdk_copy(sdk_source, sdk_dir):
    """Copy the SDK tree from ``sdk_source`` into ``sdk_dir``."""
    log.debug("Copying SDK from (%s) to (%s)" % (sdk_source, sdk_dir))
    with statsd.timer('xpi.copy'):
        if not os.path.isdir(sdk_dir):
            # Destination doesn't exist yet: copy the whole tree at once.
            shutil.copytree(sdk_source, sdk_dir)
            return
        # Destination exists: merge entry by entry.
        for entry in os.listdir(sdk_source):
            src_path = os.path.join(sdk_source, entry)
            if os.path.isdir(src_path):
                shutil.copytree(src_path, os.path.join(sdk_dir, entry))
            else:
                shutil.copy(src_path, sdk_dir)
def grade_results(self, student_answer, student_results, grader_answer,
                  grader_results, scale=None):
    """Compares student and grader responses to generate a score.

    Returns a (correct, score, messages) tuple where ``correct`` is True
    only for a perfect score of 1.

    Fix: the statsd timer and the scorer are now released in ``finally``
    blocks — previously, if ``scorer.score()`` raised, ``timer.stop()``
    was never called (leaking/ skewing the metric) and ``scorer.close()``
    never ran (leaking its underlying resources).
    """
    timer = statsd.timer('bux_sql_grader.grade_results').start()
    try:
        scorer = MySQLRubricScorer(student_answer, student_results,
                                   grader_answer, grader_results, scale)
        try:
            score, messages = scorer.score()
        finally:
            scorer.close()
    finally:
        timer.stop()
    correct = (score == 1)
    return correct, score, messages
def collect_tweets():
    """Collect new tweets about Firefox and store them in the DB.

    NOTE(review): this appears to be a near-verbatim duplicate of a
    ``collect_tweets`` defined earlier in this file; if both live in the
    same module the later definition shadows the earlier one — confirm
    intent.
    """
    with statsd.timer('customercare.tweets.time_elapsed'):
        search_options = {
            'q': 'firefox OR #fxinput',
            'rpp': settings.CC_TWEETS_PERPAGE,  # Items per page.
            'result_type': 'recent',  # Retrieve tweets by date.
        }

        # If we already have some tweets, collect nothing older than what
        # we have.
        try:
            latest_tweet = Tweet.latest()
        except Tweet.DoesNotExist:
            log.debug('No existing tweets. Retrieving %d tweets from search.'
                      % (settings.CC_TWEETS_PERPAGE))
        else:
            search_options['since_id'] = latest_tweet.tweet_id
            log.info('Retrieving tweets with id >= %s'
                     % latest_tweet.tweet_id)

        # Retrieve Tweets.  Failures are logged and swallowed so the task
        # can simply retry on its next run.
        try:
            raw_data = json.load(urllib.urlopen('%s?%s' % (
                SEARCH_URL, urllib.urlencode(search_options))))
        except Exception, e:
            log.warning('Twitter request failed: %s' % e)
            return

        if not ('results' in raw_data and raw_data['results']):
            # Twitter returned 0 results.
            return

        # Drop tweets into DB
        for item in raw_data['results']:
            # Apply filters to tweet before saving
            # Allow links in #fxinput tweets
            statsd.incr('customercare.tweet.collected')
            item = _filter_tweet(item, allow_links='#fxinput' in item['text'])
            if not item:
                continue

            # 'created_at' is an RFC 822 date string; convert to a naive
            # UTC datetime for storage.
            created_date = datetime.utcfromtimestamp(calendar.timegm(
                rfc822.parsedate(item['created_at'])))

            item_lang = item.get('iso_language_code', 'en')

            tweet = Tweet(tweet_id=item['id'], raw_json=json.dumps(item),
                          locale=item_lang, created=created_date)
            try:
                tweet.save()
                statsd.incr('customercare.tweet.saved')
            except IntegrityError:
                # Presumably a unique-constraint hit (tweet already
                # collected) — skip silently.  TODO confirm constraint.
                pass
def application(environ, start_response):
    """WSGI entry point for the add-on update-check service."""
    with statsd.timer('services.update'):
        query = dict(parse_qsl(environ['QUERY_STRING']))
        compat_mode = query.pop('compatMode', 'strict')
        try:
            update = Update(query, compat_mode)
            body = update.get_rdf()
            start_response('200 OK', update.get_headers(len(body)))
        except:
            # Record the failing request before propagating to WSGI.
            log_exception(query)
            raise
        return [body]
def check_purchase(paykey):
    """
    When a purchase is complete, ask PayPal whether it actually went
    through.  Returns the PayPal status string, or False on error.
    """
    with statsd.timer('paypal.payment.details'):
        try:
            details = _call(settings.PAYPAL_PAY_URL + 'PaymentDetails',
                            {'payKey': paykey})
        except PaypalError:
            paypal_log.error('Payment details error', exc_info=True)
            return False
    return details['status']
def decode_receipt(receipt):
    """
    Crack the receipt open using the private key.

    This will probably change to using the cert at some point,
    especially when we get the HSM.
    """
    with statsd.timer('services.decode'):
        if not settings.SIGNING_SERVER_ACTIVE:
            # Local key path: decode with the webapps receipt key.
            key = jwt.rsa_load(settings.WEBAPPS_RECEIPT_KEY)
            return jwt.decode(receipt, key)
        verifier = certs.ReceiptVerifier()
        if not verifier.verify(receipt):
            raise VerificationError()
        # Signature already verified above, so decode without verifying.
        return jwt.decode(receipt.split('~')[1], verify=False)
def browserid_login(request):
    """Log a user in from a BrowserID assertion.

    Returns 200 if already authenticated, 400 if the profile needs a
    stronger password, 401 on a failed assertion, a dict on success, and
    None when the waffle switch is off.
    """
    if not waffle.switch_is_active('browserid-login'):
        return  # Feature switched off: no response (None), as before.
    if request.user.is_authenticated():
        return http.HttpResponse(status=200)
    with statsd.timer('auth.browserid.verify'):
        profile, msg = browserid_authenticate(
            request, assertion=request.POST['assertion'])
    if profile is None:
        return http.HttpResponse(msg, status=401)
    if profile.needs_tougher_password:
        return http.HttpResponse("", status=400)
    auth.login(request, profile.user)
    comp = (profile.needs_completion()
            if settings.FORCE_PROFILE_COMPLETION else False)
    return dict(profile_needs_completion=comp)
def get_paykey(data):
    """
    Gets a paykey from Paypal. Need to pass in the following in data:
    pattern: the reverse pattern to resolve
    slug: used with pattern to build the complete/cancel URLs (required)
    email: who the money is going to (required)
    amount: the amount of money (required)
    ip: ip address of end user (required)
    uuid: contribution_uuid (required)
    memo: any nice message
    ipn: set False to skip registering an IPN callback (defaults to True)
    qs: anything you want to append to the complete or cancel(optional)
    """
    complete = reverse(data['pattern'], args=[data['slug'], 'complete'])
    cancel = reverse(data['pattern'], args=[data['slug'], 'cancel'])

    # The contribution uuid rides along on both redirect URLs so the
    # return views can find the contribution.
    qs = {'uuid': data['uuid']}
    if 'qs' in data:
        qs.update(data['qs'])
    uuid_qs = urllib.urlencode(qs)

    # Single hard-coded primary receiver, flagged as digital goods.
    paypal_data = {
        'actionType': 'PAY',
        'requestEnvelope.errorLanguage': 'US',
        'currencyCode': 'USD',
        'cancelUrl': absolutify('%s?%s' % (cancel, uuid_qs)),
        'returnUrl': absolutify('%s?%s' % (complete, uuid_qs)),
        'receiverList.receiver(0).email': data['email'],
        'receiverList.receiver(0).amount': data['amount'],
        'receiverList.receiver(0).invoiceID': 'mozilla-%s' % data['uuid'],
        'receiverList.receiver(0).primary': 'TRUE',
        'receiverList.receiver(0).paymentType': 'DIGITALGOODS',
        'trackingId': data['uuid']}

    if data.get('ipn', True):
        paypal_data['ipnNotificationUrl'] = absolutify(reverse('amo.paypal'))

    if data.get('memo'):
        paypal_data['memo'] = data['memo']

    with statsd.timer('paypal.paykey.retrieval'):
        try:
            response = _call(settings.PAYPAL_PAY_URL + 'Pay',
                             paypal_data, ip=data['ip'])
        # NOTE(review): only AuthError is re-raised here; on success
        # `response` is never returned, so this function yields None —
        # looks truncated (a sibling version returns
        # response['payKey']).  Confirm against callers.
        except AuthError, error:
            paypal_log.error('Authentication error: %s' % error)
            raise
def check_refund_permission(token): """ Asks PayPal whether the PayPal ID for this account has granted refund permission to us. """ # This is set in settings_test so we don't start calling PayPal # by accident. Explicitly set this in your tests. if not settings.PAYPAL_PERMISSIONS_URL: return False paypal_log.debug('Checking refund permission for token: %s..' % token[:10]) try: with statsd.timer('paypal.permissions.refund'): r = _call(settings.PAYPAL_PERMISSIONS_URL + 'GetPermissions', {'token': token}) except PaypalError, error: paypal_log.debug('Paypal returned error for token: %s.. error: %s' % (token[:10], error)) return False
def get_preapproval_key(data):
    """
    Get a preapproval key from PayPal.  On success the returned response
    carries a key usable in a redirect to PayPal.
    """
    date_format = '%Y-%m-%d'
    request_params = {
        'currencyCode': 'USD',
        'startingDate': data['startDate'].strftime(date_format),
        'endingDate': data['endDate'].strftime(date_format),
        'maxTotalAmountOfAllPayments': str(data.get('maxAmount', '2000')),
        'returnUrl': absolutify(reverse(data['pattern'], args=['complete'])),
        'cancelUrl': absolutify(reverse(data['pattern'], args=['cancel'])),
    }
    with statsd.timer('paypal.preapproval.token'):
        response = _call(settings.PAYPAL_PAY_URL + 'Preapproval',
                         request_params, ip=data.get('ip'))
    return response
def usernames(request):
    """An API to provide auto-complete data for user names."""
    # Accept the prefix under either parameter name; 'term' wins.
    prefix = request.GET.get('term', '') or request.GET.get('query', '')
    if not prefix:
        return []
    if not request.user.is_authenticated():
        return []

    with statsd.timer('users.api.usernames.search'):
        profile_ids = Profile.objects.filter(
            Q(name__istartswith=prefix)).values_list('user_id', flat=True)
        matches = User.objects.filter(
            Q(username__istartswith=prefix) | Q(id__in=profile_ids),
        ).select_related('profile')[:10]
        return [{'username': u.username,
                 'display_name': display_name_or_none(u)}
                for u in matches]
def application(environ, start_response):
    """WSGI entry point for the update service, with request timing logs."""
    start = time()
    method = environ['REQUEST_METHOD']
    path = '%s?%s' % (environ['SCRIPT_NAME'], environ['QUERY_STRING'])
    with statsd.timer('services.update'):
        query = dict(parse_qsl(environ['QUERY_STRING']))
        try:
            update = Update(query)
            body = update.get_rdf()
            start_response('200 OK', update.get_headers(len(body)))
        except:
            timing_log.info('%s "%s" (500) %.2f [ANON]'
                            % (method, path, time() - start))
            log_exception(query)
            raise
        timing_log.info('%s "%s" (200) %.2f [ANON]'
                        % (method, path, time() - start))
        return [body]
def decode_receipt(receipt):
    """
    Crack the receipt open using the private key.

    This will probably change to using the cert at some point,
    especially when we get the HSM.
    """
    with statsd.timer('services.decode'):
        if not settings.SIGNING_SERVER_ACTIVE:
            # Local key path: decode with the webapps receipt key.
            key = jwt.rsa_load(settings.WEBAPPS_RECEIPT_KEY)
            return jwt.decode(receipt, key)
        verifier = certs.ReceiptVerifier()
        try:
            verified = verifier.verify(receipt)
        except ExpiredSignatureError:
            # Until we can do something meaningful with this, just ignore.
            return jwt.decode(receipt.split('~')[1], verify=False)
        if not verified:
            raise VerificationError()
        # Signature already verified above, so decode without verifying.
        return jwt.decode(receipt.split('~')[1], verify=False)
def get_paykey(data):
    """
    Gets a paykey from Paypal. Need to pass in the following in data:
    pattern: the reverse pattern to resolve
    email: who the money is going to (required)
    amount: the amount of money (required)
    ip: ip address of end user (required)
    uuid: contribution_uuid (required)
    memo: any nice message
    qs: anything you want to append to the complete or cancel(optional)
    """
    complete_url = reverse(data['pattern'], args=[data['slug'], 'complete'])
    cancel_url = reverse(data['pattern'], args=[data['slug'], 'cancel'])

    # Thread the contribution uuid (plus any caller extras) through both
    # redirect URLs.
    extra_qs = {'uuid': data['uuid']}
    if 'qs' in data:
        extra_qs.update(data['qs'])
    encoded_qs = urllib.urlencode(extra_qs)

    paypal_data = {
        'actionType': 'PAY',
        'currencyCode': 'USD',
        'cancelUrl': absolutify('%s?%s' % (cancel_url, encoded_qs)),
        'returnUrl': absolutify('%s?%s' % (complete_url, encoded_qs)),
        'trackingId': data['uuid'],
        'ipnNotificationUrl': absolutify(reverse('amo.paypal'))}
    paypal_data.update(add_receivers(data.get('chains', ()), data['email'],
                                     data['amount'], data['uuid']))

    if data.get('memo'):
        paypal_data['memo'] = data['memo']

    with statsd.timer('paypal.paykey.retrieval'):
        response = _call(settings.PAYPAL_PAY_URL + 'Pay', paypal_data,
                         ip=data['ip'])
    return response['payKey']