Example #1
0
def run_validator(file_path, url=None):
    """A pre-configured wrapper around the app validator.

    Copies `file_path` out of `private_storage` to a local temporary
    file (the upload may not live on the local filesystem), runs the
    packaged- or hosted-app validator on it, and returns the validation
    result. The local copy is always removed.
    """
    # mkstemp() creates the file atomically; mktemp() (used previously)
    # only invents a name and is open to a symlink race before open().
    fd, temp_path = tempfile.mkstemp()
    try:
        # Make a copy of the file since we can't assume the
        # uploaded file is on the local filesystem.
        with os.fdopen(fd, "wb") as local_f:
            with private_storage.open(file_path) as remote_f:
                copyfileobj(remote_f, local_f)

        with statsd.timer("mkt.developers.validator"):
            is_packaged = zipfile.is_zipfile(temp_path)
            if is_packaged:
                log.info(u"Running `validate_packaged_app` for path: %s" % (file_path))
                with statsd.timer("mkt.developers.validate_packaged_app"):
                    return validate_packaged_app(
                        temp_path,
                        market_urls=settings.VALIDATOR_IAF_URLS,
                        timeout=settings.VALIDATOR_TIMEOUT,
                        spidermonkey=settings.SPIDERMONKEY,
                    )
            else:
                log.info(u"Running `validate_app` for path: %s" % (file_path))
                with statsd.timer("mkt.developers.validate_app"):
                    # `with` closes the handle; the bare open().read()
                    # used to leak it.
                    with open(temp_path) as packaged_f:
                        return validate_app(
                            packaged_f.read(),
                            market_urls=settings.VALIDATOR_IAF_URLS,
                            url=url,
                        )
    finally:
        # Clean up copied files. The unlink previously sat *after* the
        # `return` statements and never executed, leaking one temp file
        # per validation run; `finally` guarantees it runs.
        os.unlink(temp_path)
Example #2
0
def run_validator(file_path, url=None):
    """A pre-configured wrapper around the app validator.

    Copies `file_path` from `private_storage` to local disk (the upload
    may not live on the local filesystem), runs the packaged- or
    hosted-app validator on the copy, and returns the result. The local
    copy is always removed.
    """
    # Make a copy of the file since we can't assume the
    # uploaded file is on the local filesystem.
    # mkstemp() creates the file atomically; mktemp() (used previously)
    # only invents a name and is open to a symlink race before the copy.
    fd, temp_path = tempfile.mkstemp()
    os.close(fd)
    try:
        copy_stored_file(
            file_path, temp_path,
            src_storage=private_storage, dst_storage=local_storage)

        with statsd.timer('mkt.developers.validator'):
            is_packaged = zipfile.is_zipfile(temp_path)
            if is_packaged:
                log.info(u'Running `validate_packaged_app` for path: %s'
                         % (file_path))
                with statsd.timer('mkt.developers.validate_packaged_app'):
                    return validate_packaged_app(
                        temp_path,
                        market_urls=settings.VALIDATOR_IAF_URLS,
                        timeout=settings.VALIDATOR_TIMEOUT,
                        spidermonkey=settings.SPIDERMONKEY)
            else:
                log.info(u'Running `validate_app` for path: %s' % (file_path))
                with statsd.timer('mkt.developers.validate_app'):
                    # `with` closes the handle; the bare open().read()
                    # used to leak it.
                    with open(temp_path) as local_f:
                        return validate_app(
                            local_f.read(),
                            market_urls=settings.VALIDATOR_IAF_URLS,
                            url=url)
    finally:
        # Clean up copied files. The unlink previously sat *after* the
        # `return` statements and never executed, leaking one temp file
        # per run; `finally` guarantees it runs.
        os.unlink(temp_path)
Example #3
0
def get_paykey(data):
    """
    Gets a paykey from Paypal. Need to pass in the following in data:
    pattern: the reverse pattern to resolve
    email: who the money is going to (required)
    amount: the amount of money (required)
    ip: ip address of end user (required)
    uuid: contribution_uuid (required)
    memo: any nice message (optional)
    qs: anything you want to append to the complete or cancel (optional)
    currency: valid paypal currency, defaults to USD (optional)
    """
    if data["pattern"]:
        complete = reverse(data["pattern"], args=[data["slug"], "complete"])
        cancel = reverse(data["pattern"], args=[data["slug"], "cancel"])
    else:
        # If there's no pattern given, just fake some urls.
        complete = cancel = settings.SITE_URL + "/paypal/dummy/"

    # Both redirect URLs carry the contribution uuid (plus any
    # caller-supplied query args) so the return leg can be matched back
    # to this contribution.
    qs = {"uuid": data["uuid"]}
    if "qs" in data:
        qs.update(data["qs"])
    uuid_qs = urllib.urlencode(qs)

    paypal_data = {
        "actionType": "PAY",
        "currencyCode": data.get("currency", "USD"),
        "cancelUrl": absolutify("%s?%s" % (cancel, uuid_qs)),
        "returnUrl": absolutify("%s?%s" % (complete, uuid_qs)),
        "trackingId": data["uuid"],
        "ipnNotificationUrl": absolutify(reverse("amo.paypal")),
    }

    # Arguments for add_receivers(), reused verbatim in the retry path.
    receivers = (data.get("chains", ()), data["email"], data["amount"], data["uuid"])

    if data.get("preapproval"):
        # The paypal_key might be empty if they have removed it.
        key = data["preapproval"].paypal_key
        if key:
            paypal_log.info("Using preapproval: %s" % data["preapproval"].pk)
            paypal_data["preapprovalKey"] = key

    # Receiver options depend on whether a preapproval key made it into
    # paypal_data above, so this must run after the preapproval check.
    paypal_data.update(add_receivers(*receivers, preapproval="preapprovalKey" in paypal_data))

    if data.get("memo"):
        paypal_data["memo"] = data["memo"]

    try:
        with statsd.timer("paypal.paykey.retrieval"):
            response = _call(settings.PAYPAL_PAY_URL + "Pay", paypal_data, ip=data["ip"])
    except PreApprovalError, e:
        # Let's retry just once without preapproval.
        paypal_log.error("Failed using preapproval, reason: %s" % e)
        # Now it's not a pre-approval, make sure we get the
        # DIGITALGOODS setting back in there.
        del paypal_data["preapprovalKey"]
        paypal_data.update(add_receivers(*receivers))
        # If this fails, we won't try again, just fail.
        with statsd.timer("paypal.paykey.retrieval"):
            response = _call(settings.PAYPAL_PAY_URL + "Pay", paypal_data, ip=data["ip"])
    # NOTE(review): `response` is never returned within this excerpt --
    # presumably the function continues beyond it; confirm against the
    # full source.
Example #4
0
def get_paykey(data):
    """
    Gets a paykey from Paypal. Need to pass in the following in data:
    pattern: the reverse pattern to resolve
    email: who the money is going to (required)
    amount: the amount of money (required)
    ip: ip address of end user (required)
    uuid: contribution_uuid (required)
    memo: any nice message (optional)
    qs: anything you want to append to the complete or cancel (optional)
    currency: valid paypal currency, defaults to USD (optional)
    """
    # NOTE(review): reverse() is called unconditionally, so a missing or
    # empty data['pattern'] raises here -- confirm callers always supply
    # it.
    complete = reverse(data['pattern'], args=[data['slug'], 'complete'])
    cancel = reverse(data['pattern'], args=[data['slug'], 'cancel'])

    # Both redirect URLs carry the contribution uuid (plus any
    # caller-supplied query args) so the return leg can be matched back
    # to this contribution.
    qs = {'uuid': data['uuid']}
    if 'qs' in data:
        qs.update(data['qs'])
    uuid_qs = urllib.urlencode(qs)

    paypal_data = {
        'actionType': 'PAY',
        'currencyCode': data.get('currency', 'USD'),
        'cancelUrl': absolutify('%s?%s' % (cancel, uuid_qs)),
        'returnUrl': absolutify('%s?%s' % (complete, uuid_qs)),
        'trackingId': data['uuid'],
        'ipnNotificationUrl': absolutify(reverse('amo.paypal'))}

    # Arguments for add_receivers(), reused verbatim in the retry path.
    receivers = (data.get('chains', ()), data['email'], data['amount'],
                 data['uuid'])

    if data.get('preapproval'):
        # The paypal_key might be empty if they have removed it.
        key = data['preapproval'].paypal_key
        if key:
            paypal_log.info('Using preapproval: %s' % data['preapproval'].pk)
            paypal_data['preapprovalKey'] = key

    # Receiver options depend on whether a preapproval key made it into
    # paypal_data above, so this must run after the preapproval check.
    paypal_data.update(add_receivers(*receivers,
                                preapproval='preapprovalKey' in paypal_data))

    if data.get('memo'):
        paypal_data['memo'] = data['memo']

    try:
        with statsd.timer('paypal.paykey.retrieval'):
            response = _call(settings.PAYPAL_PAY_URL + 'Pay', paypal_data,
                             ip=data['ip'])
    except PreApprovalError, e:
        # Let's retry just once without preapproval.
        paypal_log.error('Failed using preapproval, reason: %s' % e)
        # Now it's not a pre-approval, make sure we get the
        # DIGITALGOODS setting back in there.
        del paypal_data['preapprovalKey']
        paypal_data.update(add_receivers(*receivers))
        # If this fails, we won't try again, just fail.
        with statsd.timer('paypal.paykey.retrieval'):
            response = _call(settings.PAYPAL_PAY_URL + 'Pay', paypal_data,
                             ip=data['ip'])
    # NOTE(review): `response` is never returned within this excerpt --
    # presumably the function continues beyond it; confirm against the
    # full source.
Example #5
0
def get_permission_url(addon, dest, scope):
    """
    Send permissions request to PayPal for privileges on
    this PayPal account. Returns URL on PayPal site to visit.
    Documentation: http://bit.ly/zlhXlT
    """
    # This is set in settings_test so we don't start calling PayPal
    # by accident. Explicitly set this in your tests.
    if not settings.PAYPAL_PERMISSIONS_URL:
        return ''

    paypal_log.debug('Getting refund permission URL for addon: %s' % addon.pk)

    with statsd.timer('paypal.permissions.url'):
        # PayPal will redirect the user back to this add-on dev URL once
        # the permission flow finishes; `dest` is threaded through it.
        url = urlparams(addon.get_dev_url('acquire_refund_permission'),
                        dest=dest)
        try:
            r = _call(settings.PAYPAL_PERMISSIONS_URL + 'RequestPermissions',
                      {'scope': scope, 'callback': absolutify(url)})
        except PaypalError, e:
            paypal_log.debug('Error on refund permission URL addon: %s, %s' %
                             (addon.pk, e))
            if e.id == '580028':
                # PayPal is very picky about where they redirect users to.
                # If you try and create a PayPal permissions URL on a
                # zamboni that has a non-standard port number or a
                # non-standard TLD, it will blow up with an error. We need
                # to be able to at least visit these pages and alter them
                # in dev, so this will give you a broken token that doesn't
                # work, but at least the page will function.
                r = {'token': 'wont-work-paypal-doesnt-like-your-domain'}
            else:
                raise
    # NOTE(review): `r` (and its 'token') is never used after this point
    # in the excerpt, although the docstring promises a returned URL --
    # presumably the function continues beyond this view; confirm
    # against the full source.
Example #6
0
def get_preapproval_key(data):
    """
    Get a preapproval key from PayPal. If this passes, you get a key that
    you can use in a redirect to PayPal.
    """
    date_format = '%Y-%m-%d'
    paypal_data = {
        'currencyCode': 'USD',
        'startingDate': data['startDate'].strftime(date_format),
        'endingDate': data['endDate'].strftime(date_format),
        'maxTotalAmountOfAllPayments': str(data.get('maxAmount', '2000')),
        'returnUrl': absolutify(reverse(data['pattern'], args=['complete'])),
        'cancelUrl': absolutify(reverse(data['pattern'], args=['cancel'])),
    }
    if settings.PAYPAL_LIMIT_PREAPPROVAL:
        # Cap how much and how often a preapproval can be charged.
        paypal_data['maxAmountPerPayment'] = 15
        paypal_data['maxNumberOfPaymentsPerPeriod'] = 15
        paypal_data['paymentPeriod'] = 'DAILY'

    with statsd.timer('paypal.preapproval.token'):
        response = _call(settings.PAYPAL_PAY_URL + 'Preapproval', paypal_data,
                         ip=data.get('ip'))

    return response
Example #7
0
    def process_response(self, request, response):
        """Rewrite successful HTML responses to pjax (X-PJAX) requests
        down to the <title> plus the contents of the configured pjax
        container."""
        # Only touch 200 HTML responses explicitly requested via the
        # X-PJAX header; everything else passes through untouched.
        if (request.META.get('HTTP_X_PJAX') and
            response.status_code == 200 and
            'html' in response.get('content-type', '').lower()):
            # TODO(Kumar) cache this.
            with statsd.timer('pjax.parse'):
                tree = lxml.html.document_fromstring(response.content)
                # HTML is encoded as ascii with entity refs for non-ascii.
                html = []
                found_pjax = False
                for elem in tree.cssselect('title,%s'
                                           % settings.PJAX_SELECTOR):
                    if elem.tag == 'title':
                        # Inject a <title> for jquery-pjax
                        html.append(lxml.html.tostring(elem, encoding=None))
                    else:
                        found_pjax = True
                        # Keep the container's leading text node, then
                        # serialize each child element individually; the
                        # container element itself is stripped.
                        if elem.text:
                            html.append(elem.text.encode('ascii',
                                                         'xmlcharrefreplace'))
                        for ch in elem.iterchildren():
                            html.append(lxml.html.tostring(ch, encoding=None))
                if not found_pjax:
                    msg = ('pjax response for %s does not contain selector %r'
                           % (request.path, settings.PJAX_SELECTOR))
                    if settings.DEBUG:
                        # Tell the developer the template is bad.
                        raise ValueError(msg)
                    else:
                        # In production, log and serve the full page
                        # rather than breaking the request.
                        pjax_log.error(msg)
                        return response

                response.content = ''.join(html)

        return response
Example #8
0
def sign_app(src, dest, reviewer=False):
    """
    Generate a manifest and signature and send signature to signing server to
    be signed.

    Raises SigningError if the archive cannot be read or the signing
    service rejects the post.
    """
    active_endpoint = _get_endpoint(reviewer)
    timeout = settings.SIGNED_APPS_SERVER_TIMEOUT

    # No signing endpoint configured: fall back to the no-op path.
    if not active_endpoint:
        _no_sign(src, dest)
        return

    # Extract necessary info from the archive
    try:
        jar = JarExtractor(
            storage.open(src, 'r'), storage.open(dest, 'w'),
            omit_signature_sections=settings.SIGNED_APPS_OMIT_PER_FILE_SIGS)
    except Exception:
        # `except Exception` rather than a bare `except:` so that
        # SystemExit/KeyboardInterrupt are not converted into a
        # SigningError.
        log.error('Archive extraction failed. Bad archive?', exc_info=True)
        raise SigningError('Archive extraction failed. Bad archive?')

    log.info('App signature contents: %s' % jar.signatures)

    log.info('Calling service: %s' % active_endpoint)
    try:
        with statsd.timer('services.sign.app'):
            response = requests.post(active_endpoint, timeout=timeout,
                                     files={'file': ('zigbert.sf',
                                                     str(jar.signatures))})
    except requests.exceptions.HTTPError, error:
        # Will occur when a 3xx or greater code is returned.
        # requests' Response exposes `status_code`, not `status`; the
        # previous attribute access would itself raise AttributeError
        # while formatting this message.
        log.error('Posting to app signing failed: %s, %s' % (
            error.response.status_code, error))
        raise SigningError('Posting to app signing failed: %s, %s' % (
            error.response.status_code, error))
    # NOTE(review): `response` is unused within this excerpt -- it is
    # presumably consumed further on in the full source; confirm.
Example #9
0
def send(requested):
    """
    Given data from request, call the actual provider server.

    If a provider generates a 500 it is returned as a 502 and does not raise
    an error locally.

    All other responses are returned to the calling application.
    """
    proxied = HttpResponse()
    call = getattr(requests, requested.pop('method'))

    if not requested['verify']:
        raise ValueError('verify must be a path to a .crt or True')

    try:
        with statsd.timer('solitude-auth.send'):
            log.info('Calling: {0}'.format(requested['url']))
            # Everything left in `requested` (after popping method/url)
            # is forwarded as keyword arguments to requests.
            result = call(requested.pop('url'), **requested)
    except requests.exceptions.RequestException as err:
        log.exception('%s: %s' % (err.__class__.__name__, err))
        # Return exceptions from the provider as a 502, leaving
        # 500 for payments-server-auth errors.
        proxied.status_code = 502
        return proxied

    if not (200 <= result.status_code <= 299):
        log.error('Warning response status: {0}'.format(result.status_code))

    # Mirror the provider's status, body and content type back.
    proxied.status_code = result.status_code
    proxied.content = result.text
    proxied['Content-Type'] = result.headers['Content-Type']
    return proxied
Example #10
0
def sign(receipt):
    """
    Send the receipt to the signing service.

    This could possibly be made async via celery.
    """
    destination = settings.SIGNING_SERVER
    # If no destination is set. Just ignore this request.
    if not destination:
        return

    destination += '/1.0/sign'
    timeout = settings.SIGNING_SERVER_TIMEOUT

    receipt_json = json.dumps(receipt)
    log.info('Calling service: %s' % destination)
    log.info('Receipt contents: %s' % receipt_json)
    headers = {'Content-Type': 'application/json'}
    # Pre-serialized receipts (strings) are posted as-is; anything else
    # is sent as the JSON serialization computed above.
    data = receipt if isinstance(receipt, basestring) else receipt_json
    request = urllib2.Request(destination, data, headers)

    try:
        with statsd.timer('services.sign'):
            response = urllib2.urlopen(request, timeout=timeout)
    except urllib2.HTTPError, error:
        # Will occur when a 3xx or greater code is returned
        log.error('Posting to signing failed: %s'
                  % (error.code))
        raise SigningError
    # NOTE(review): `response` is unused within this excerpt -- the
    # signed receipt is presumably read from it further on in the full
    # source; confirm.
Example #11
0
def run_addons_linter(path, listed=True):
    """Run the addons-linter binary on `path` and return its (fixed-up)
    JSON output as a string."""
    from .utils import fix_addons_linter_output

    args = [settings.ADDONS_LINTER_BIN, path, '--boring', '--output=json']
    if not listed:
        args.append('--self-hosted')

    linter = subprocess.Popen(
        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    with statsd.timer('devhub.linter'):
        stdout, stderr = linter.communicate()

    # Anything on stderr means the linter itself failed.
    if stderr:
        raise ValueError(stderr)

    parsed = json.loads(stdout)
    result = json.dumps(fix_addons_linter_output(parsed, listed))
    track_validation_stats(result, addons_linter=True)
    return result
Example #12
0
def get_fxa_profile(token, config):
    """Fetch the FxA profile for `token`.

    Raises IdentificationError if the profile endpoint does not answer
    200 or the profile lacks an email."""
    log.debug('Getting profile [{token}]'.format(token=token))
    auth = {'Authorization': 'Bearer {token}'.format(token=token)}
    with statsd.timer('accounts.fxa.identify.profile'):
        response = requests.get(config['profile_host'] + '/profile',
                                headers=auth)

    if response.status_code != 200:
        log.info(
            'Profile returned non-200 status {status} {body} '
            '[{token}]'.format(
                token=token, status=response.status_code,
                body=response.content))
        raise IdentificationError('Could not find profile for {token}'.format(
            token=token))

    profile = response.json()
    if not profile.get('email'):
        log.info('Incomplete profile {profile} [{token}]'.format(
            profile=profile, token=token))
        raise IdentificationError('Profile incomplete for {token}'.format(
            token=token))

    log.debug('Got profile {profile} [{token}]'.format(profile=profile,
                                                       token=token))
    return profile
Example #13
0
def application(environ, start_response):
    """
    Developing locally?

        gunicorn -b 0.0.0.0:7000 -w 12 -k sync -t 90 --max-requests 5000 \
            -n gunicorn-theme_update services.wsgi.theme_update:application

    """

    ok_status = "200 OK"
    with statsd.timer("services.theme_update"):
        # Drain the request body.
        data = environ["wsgi.input"].read()
        try:
            path_match = url_re.match(environ["PATH_INFO"])
            locale, id_ = path_match.groups()
            locale = locale.lstrip("/") if locale else "en-US"
            id_ = int(id_)
        except AttributeError:  # URL path incorrect.
            start_response("404 Not Found", [])
            return [""]

        try:
            update = ThemeUpdate(locale, id_, environ.get("QUERY_STRING"))
            output = update.get_json()
            if output:
                start_response(ok_status, update.get_headers(len(output)))
            else:
                start_response("404 Not Found", [])
                return [""]
        except:
            log_exception(environ["PATH_INFO"])
            raise

    return [output]
Example #14
0
def call_signing(file_obj, endpoint):
    """Get the jar signature and send it to the signing server to be signed.

    Returns the serial number of the certificate that produced the
    signature.
    """
    # Create (and keep) a uniquely-named temporary file. The previous
    # `NamedTemporaryFile()` + immediate close deleted the file at once,
    # leaving a window in which another process could claim the same
    # path; `delete=False` keeps the name reserved until shutil.move()
    # relocates it below. (Trade-off: if signing fails, an empty temp
    # file may be left behind.)
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_filename = temp_file.name

    # Extract jar signature.
    jar = JarExtractor(path=storage.open(file_obj.file_path),
                       outpath=temp_filename,
                       omit_signature_sections=True,
                       extra_newlines=True)

    log.debug(u'File signature contents: {0}'.format(jar.signatures))

    log.debug(u'Calling signing service: {0}'.format(endpoint))
    with statsd.timer('services.sign.addon'):
        response = requests.post(
            endpoint,
            timeout=settings.SIGNING_SERVER_TIMEOUT,
            data={'addon_id': get_id(file_obj.version.addon)},
            files={'file': (u'mozilla.sf', unicode(jar.signatures))})
    if response.status_code != 200:
        msg = u'Posting to add-on signing failed: {0}'.format(response.reason)
        log.error(msg)
        raise SigningError(msg)

    # The service answers JSON holding the PKCS#7 blob; embed it in the
    # jar and swap the signed archive into place.
    pkcs7 = b64decode(json.loads(response.content)['mozilla.rsa'])
    cert_serial_num = get_signature_serial_number(pkcs7)
    jar.make_signed(pkcs7, sigpath=u'mozilla')
    shutil.move(temp_filename, file_obj.file_path)
    return cert_serial_num
Example #15
0
def trans_start_url(request):
    """
    JSON handler to get the Bango payment URL to start a transaction.
    """
    trans_id = request.session.get('trans_id')
    data = {'url': None, 'status': None}

    # Without a transaction ID in the session there is nothing to look
    # up.
    if not trans_id:
        log.error('trans_start_url(): no transaction ID in session')
        return http.HttpResponseBadRequest()

    trans = None
    try:
        statsd.incr('purchase.payment_time.retry')
        with statsd.timer('purchase.payment_time.get_transaction'):
            trans = solitude.get_transaction(trans_id)
        data['status'] = trans['status']
        data['provider'] = constants.PROVIDERS_INVERTED[trans['provider']]
    except ObjectDoesNotExist:
        log.error('trans_start_url() transaction does not exist: {t}'
                  .format(t=trans_id))

    if data['status'] == constants.STATUS_PENDING:
        statsd.incr('purchase.payment_time.success')
        payment_start = request.session.get('payment_start', False)
        if payment_start:
            # Record how long the user waited, in milliseconds.
            elapsed_ms = int((time.time() - float(payment_start)) * 1000)
            statsd.timing('purchase.payment_time.duration', elapsed_ms)
        url = get_payment_url(trans)
        log.info('async call got payment URL {url} for trans {tr}'
                 .format(url=url, tr=trans))
        data['url'] = url
    return data
Example #16
0
def monitor(request, format=None):
    """Run every health check and report pass/fail, either as JSON or as
    a rendered status page. Any failing check yields an HTTP 500."""
    # For each check, a boolean pass/fail status to show in the template
    status_summary = {}
    results = {}

    for check in ('memcache', 'libraries', 'elastic', 'path', 'redis',
                  'signer'):
        with statsd.timer('monitor.%s' % check) as timer:
            status, result = getattr(monitors, check)()
        status_summary[check] = status
        results['%s_results' % check] = result
        results['%s_timer' % check] = timer.ms

    # If anything broke, send HTTP 500.
    status_code = 200 if all(status_summary.values()) else 500

    if format == '.json':
        return http.HttpResponse(json.dumps(status_summary),
                                 status=status_code)

    ctx = dict(results)
    ctx['status_summary'] = status_summary
    return jingo.render(request, 'services/monitor.html',
                        ctx, status=status_code)
Example #17
0
def usernames(request):
    """An API to provide auto-complete data for user names."""
    # Accept the prefix from either ?term= or ?query=.
    pre = request.GET.get('term', '') or request.GET.get('query', '')

    if not pre or not request.user.is_authenticated():
        return []

    with statsd.timer('users.api.usernames.search'):
        # Users whose profile display name matches the prefix.
        profile_ids = (
            Profile.objects.filter(Q(name__istartswith=pre))
            .values_list('user_id', flat=True))
        # Shortest usernames first.
        users = (
            User.objects
            .filter(Q(username__istartswith=pre) | Q(id__in=profile_ids))
            .extra(select={'length': 'Length(username)'})
            .order_by('length')
            .select_related('profile'))

        if not waffle.switch_is_active('users-dont-limit-by-login'):
            # Only suggest users seen in the last 12 weeks.
            cutoff = datetime.now() - timedelta(weeks=12)
            users = users.filter(last_login__gte=cutoff)

        return [{'username': u.username,
                 'display_name': display_name_or_none(u),
                 'avatar': profile_avatar(u, 24)}
                for u in users[:10]]
Example #18
0
    def sign_file(self):
        """Sign the original file (`file_path`), then move signed extension
        file to the signed path (`signed_file_path`) on public storage. The
        original file remains on private storage.

        Return the signed file size."""
        # Both values feed the signing manifest below; refuse to sign
        # without them rather than produce an unidentifiable signature.
        if not self.extension.uuid:
            raise SigningError('Need uuid to be set to sign')
        if not self.pk:
            raise SigningError('Need version pk to be set to sign')

        ids = json.dumps({
            # 'id' needs to be an unique identifier not shared with anything
            # else (other extensions, langpacks, webapps...), but should not
            # change when there is an update.
            'id': self.extension.uuid,
            # 'version' should be an integer and should be monotonically
            # increasing.
            'version': self.pk
        })
        with statsd.timer('extensions.sign'):
            try:
                # This will read the file from self.file_path, generate a
                # signature and write the signed file to self.signed_file_path.
                sign_app(private_storage.open(self.file_path),
                         self.signed_file_path, ids)
            except SigningError:
                log.info('[ExtensionVersion:%s] Signing failed' % self.pk)
                self.remove_public_signed_file()  # Clean up.
                raise
        return public_storage.size(self.signed_file_path)
Example #19
0
    def call(self):
        """Call the proxied service, return a response."""
        proxied = http.HttpResponse()
        caller = getattr(requests, self.method)
        try:
            timer_key = 'solitude.proxy.%s.%s' % (self.service, self.name)
            with statsd.timer(timer_key):
                log.info('Calling service: %s at %s with %s' %
                         (self.service, self.url, self.method))
                dump_request(request=None, method=self.method, url=self.url,
                             body=self.body, headers=self.headers)
                # We aren't calling client._call because that tries to parse
                # the output. Once the headers are prepared, this will do the
                # rest.
                result = caller(self.url, data=self.body,
                                headers=self.headers,
                                timeout=self.timeout, verify=True)
        except requests.exceptions.RequestException as err:
            dump_response(status_code=500)
            log.exception('%s: %s' % (err.__class__.__name__, err))
            proxied.status_code = 500
            return proxied

        dump_response(response=result)
        if not (200 <= result.status_code <= 299):
            log.error('Warning response status: {0}'
                      .format(result.status_code))

        # Ensure the response passed along is updated with the response given.
        proxied.status_code = result.status_code
        proxied.content = result.text
        proxied['Content-Type'] = result.headers['Content-Type']
        return proxied
Example #20
0
 def reviewer_sign_file(self):
     """Sign the original file (`file_path`) with reviewer certs, then move
     the signed file to the reviewers-specific signed path
     (`reviewer_signed_file_path`) on private storage."""
     # Both values end up in the signing manifest; refuse to sign
     # without them.
     if not self.extension.uuid:
         raise SigningError('Need uuid to be set to sign')
     if not self.pk:
         raise SigningError('Need version pk to be set to sign')
     ids = json.dumps({
         # Reviewers get a unique 'id' so the reviewer installed add-on
         # won't conflict with the public add-on, and also so even multiple
         # versions of the same add-on can be installed side by side with
         # other versions.
         'id': 'reviewer-{guid}-{version_id}'.format(
             guid=self.extension.uuid, version_id=self.pk),
         'version': self.pk
     })
     with statsd.timer('extensions.sign_reviewer'):
         try:
             # This will read the file from self.file_path, generate a
             # reviewer signature and write the signed file to
             # self.reviewer_signed_file_path.
             sign_app(private_storage.open(self.file_path),
                      self.reviewer_signed_file_path, ids, reviewer=True)
         except SigningError:
             log.info(
                 '[ExtensionVersion:%s] Reviewer Signing failed' % self.pk)
             # Don't leave a half-written reviewer-signed file behind.
             if private_storage.exists(self.reviewer_signed_file_path):
                 private_storage.delete(self.reviewer_signed_file_path)
             raise
Example #21
0
    def lookup(self, address):
        """Resolve an IP address to a block of geo information.

        If a given address is unresolvable or the geoip server is not defined,
        return the default as defined by the settings, or "restofworld".

        """
        # No geoip server configured, or a private/loopback address:
        # nothing to look up.
        if not (self.url and is_public(address)):
            return self.default_val
        with statsd.timer('z.geoip'):
            res = None
            try:
                res = requests.post('{0}/country.json'.format(self.url),
                                    timeout=self.timeout,
                                    data={'ip': address})
            except requests.Timeout:
                statsd.incr('z.geoip.timeout')
                log.error(('Geodude timed out looking up: {0}'
                           .format(address)))
            except requests.RequestException as e:
                statsd.incr('z.geoip.error')
                log.error('Geodude connection error: {0}'.format(str(e)))
            if res is not None and res.status_code == 200:
                statsd.incr('z.geoip.success')
                return res.json().get('country_code',
                                      self.default_val).lower()
        # Timeout, connection error or non-200: fall back to the default.
        return self.default_val
Example #22
0
def sign(version_id, reviewer=False):
    """Sign the packaged app for `version_id` and return the path of the
    signed file. Raises SigningError on any precondition or signing
    failure."""
    version = Version.objects.get(pk=version_id)
    app = version.addon
    log.info('Signing version: %s of app: %s' % (version_id, app))

    if app.type != amo.ADDON_WEBAPP:
        log.error('Attempt to sign something other than an app.')
        raise SigningError('Not an app')

    if not app.is_packaged:
        log.error('Attempt to sign a non-packaged app.')
        raise SigningError('Not packaged')

    if not version.all_files:
        log.error('Attempt to sign an app with no files in version.')
        raise SigningError('No file')
    file_obj = version.all_files[0]

    if reviewer:
        path = file_obj.signed_reviewer_file_path
    else:
        path = file_obj.signed_file_path
    if storage.exists(path):
        # Signing is idempotent: reuse the existing signed file.
        log.info('Already signed app exists.')
        return path

    with statsd.timer('services.sign.app'):
        try:
            sign_app(file_obj.file_path, path, reviewer)
        except SigningError:
            # Remove any partially written output before re-raising.
            if storage.exists(path):
                storage.delete(path)
            raise
    log.info('Signing complete.')
    return path
Example #23
0
def packager(data, feature_set, **kw):
    """Build an add-on based on input data."""
    log.info("[1@None] Packaging add-on")

    from devhub.views import packager_path

    dest = packager_path(data["slug"])

    with guard(u"devhub.packager.%s" % dest) as locked:
        # Another worker already holds the lock for this destination.
        if locked:
            log.error(u"Packaging in progress: %s" % dest)
            return

        with statsd.timer("devhub.packager"):
            from packager.main import packager

            log.info("Starting packaging: %s" % dest)
            # Only the features that were actually enabled.
            features = set(k for k, v in feature_set.items() if v)
            try:
                packager(data, dest, features)
            except Exception as err:
                log.error(u"Failed to package add-on: %s" % err)
                raise
            if os.path.exists(dest):
                log.info(u"Package saved: %s" % dest)
Example #24
0
 def _wrapped(*args, **kwargs):
     """Time the wrapped call, reporting under a jobs.duration.* key."""
     key = 'jobs.duration.{name}'.format(name=name)
     if stat_suffix:
         # Derive an extra key segment from the call's arguments.
         key += '.{key}'.format(key=stat_suffix(*args, **kwargs))
     with statsd.timer(key):
         return func(*args, **kwargs)
Example #25
0
def application(environ, start_response):
    """
    WSGI entry point for the theme-update service.  Parses the locale
    and theme id out of PATH_INFO and returns the update JSON for that
    theme, or a 404 when the path or theme is unknown.

    Developing locally?

        gunicorn -b 0.0.0.0:7000 -w 12 -k sync -t 90 --max-requests 5000 \
            -n gunicorn-theme_update services.wsgi.theme_update:application

    """

    status = '200 OK'
    with statsd.timer('services.theme_update'):
        # Read the raw body up front so it can be included in the error
        # log if anything below blows up.
        data = environ['wsgi.input'].read()
        try:
            locale, id_ = url_re.match(environ['PATH_INFO']).groups()
            locale = (locale or 'en-US').lstrip('/')
            id_ = int(id_)
        except AttributeError:  # URL path incorrect.
            start_response('404 Not Found', [])
            return ['']

        try:
            update = ThemeUpdate(locale, id_, environ.get('QUERY_STRING'))
            output = update.get_json()
            if not output:
                # No update data available for this theme/locale.
                start_response('404 Not Found', [])
                return ['']
            start_response(status, update.get_headers(len(output)))
        except:
            # Log the request body for debugging, then let the error
            # propagate to the WSGI server.
            log_exception(data)
            raise

    return [output]
Example #26
0
def refund(paykey):
    """
    Refund a payment.

    Arguments: paykey of payment to refund

    Returns: A list of dicts containing the refund info for each
    receiver of the original payment.

    Raises:
        PaypalError: if the refund call fails, or if any receiver's
            refund did not end in an OK status.
    """
    OK_STATUSES = ['REFUNDED', 'REFUNDED_PENDING']
    # PayPal flattens per-receiver refund info into keys such as
    # "refundInfoList.refundInfo(0).refundStatus"; capture the receiver
    # index and the field name.  Raw string: '\(' is an invalid escape
    # sequence in a plain (non-raw) Python 3 string literal.
    refund_info_re = re.compile(r'refundInfoList.refundInfo\((\d+)\).(.*)')
    with statsd.timer('paypal.payment.refund'):
        try:
            response = _call(settings.PAYPAL_PAY_URL + 'Refund',
                             {'payKey': paykey})
        except PaypalError:
            paypal_log.error('Refund error', exc_info=True)
            raise
        responses = []
        for k in response:
            g = refund_info_re.match(k)
            if g:
                i = int(g.group(1))
                subkey = g.group(2)
                # Keys can arrive in any order; grow the list so that
                # index `i` exists before assigning into it.
                while i >= len(responses):
                    responses.append({})
                responses[i][subkey] = response[k]
        for d in responses:
            if d['refundStatus'] not in OK_STATUSES:
                raise PaypalError('Bad refund status for %s: %s'
                                  % (d['receiver.email'],
                                     d['refundStatus']))
            paypal_log.debug('Refund successful for: %s, %s, %s' %
                             (paykey, d['receiver.email'], d['refundStatus']))

        return responses
Example #27
0
def monitor(request, format=None):
    """Run every health check and report pass/fail status plus timings.

    Responds with JSON when `format` is '.json', otherwise renders the
    HTML monitor page.  HTTP 500 if any check reports a problem.
    """
    checks = ['memcache', 'libraries', 'elastic', 'package_signer', 'path',
              'redis', 'receipt_signer', 'settings_check', 'solitude']

    # For each check, a boolean pass/fail status to show in the template.
    status_summary = {}
    results = {}

    for check_name in checks:
        with statsd.timer('monitor.%s' % check_name) as timer:
            status, result = getattr(monitors, check_name)()
        # The status string is empty when everything is fine.
        status_summary[check_name] = {'state': not status,
                                      'status': status}
        results['%s_results' % check_name] = result
        results['%s_timer' % check_name] = timer.ms

    # If anything broke, send HTTP 500.
    everything_ok = all(entry['state'] for entry in status_summary.values())
    status_code = 200 if everything_ok else 500

    if format == '.json':
        return http.HttpResponse(json.dumps(status_summary),
                                 status=status_code)

    ctx = dict(results)
    ctx['status_summary'] = status_summary
    return render(request, 'services/monitor.html', ctx, status=status_code)
Example #28
0
    def list(self, request, *args, **kwargs):
        """Return recommended apps for the current user, falling back to
        a popularity search when recommendations are disabled,
        unavailable, time out, or come back empty."""
        if (not settings.RECOMMENDATIONS_ENABLED or
                not settings.RECOMMENDATIONS_API_URL or
                not self.request.user.is_authenticated()):
            # Recommendations are off, or the user is anonymous.
            return self._popular()
        else:
            app_ids = []
            url = '{base_url}/api/v2/recommend/{limit}/{user_hash}/'.format(
                base_url=settings.RECOMMENDATIONS_API_URL,
                limit=20, user_hash=self.request.user.recommendation_hash)

            try:
                with statsd.timer('recommendation.get'):
                    resp = requests.get(
                        url, timeout=settings.RECOMMENDATIONS_API_TIMEOUT)
                if resp.status_code == 200:
                    app_ids = resp.json()['recommendations']
            except Timeout as e:
                log.warning(u'Recommendation timeout: {error}'.format(error=e))
            except RequestException as e:
                # On recommendation API exceptions we return popular.
                log.error(u'Recommendation exception: {error}'.format(error=e))

            if not app_ids:
                # Fall back to a popularity search.
                return self._popular()

            sq = WebappIndexer.get_app_filter(self.request, app_ids=app_ids)
            return Response({
                'objects': self.serializer_class(
                    sq.execute().hits, many=True,
                    context={'request': self.request}).data})
Example #29
0
    def obj_create(self, bundle, request, **kwargs):
        """Log a user in from a BrowserID assertion.

        On success, fills `bundle.data` with an auth token, basic account
        settings and the user's permissions.  Raises an HTTP 401 error
        when the assertion does not verify to a profile.
        """
        with statsd.timer('auth.browserid.verify'):
            profile, msg = browserid_authenticate(
                request, bundle.data['assertion'],
                browserid_audience=bundle.data['audience'],
                is_native=bundle.data.get('is_native', False)
            )
        if profile is None:
            log.info('No profile: %s' % (msg or ''))
            raise http_error(http.HttpUnauthorized,
                             'No profile.')

        # Attach the authenticated user to the request for downstream code.
        request.user, request.amo_user = profile.user, profile
        request.groups = profile.groups.all()

        # TODO: move this to the signal.
        profile.log_login_attempt(True)
        user_logged_in.send(sender=profile.user.__class__, request=request,
                            user=profile.user)
        bundle.data = {
            'error': None,
            'token': self.get_token(request.amo_user.email),
            'settings': {
                'display_name': request.amo_user.display_name,
                'email': request.amo_user.email,
            }
        }
        # Merge the user's permissions in next to the token/settings.
        bundle.data.update(PermissionResource()
                           .dehydrate(Bundle(request=request)).data)
        return bundle
Example #30
0
    def call(self, url, method_name, data=None):
        """Make a JSON request to solitude and return the decoded body.

        Deprecated in favour of curling.  Raises TypeError for GET with a
        body, SolitudeOffline / SolitudeTimeout on connection problems,
        and SolitudeError for non-2xx responses.
        """
        log.info("Deprecated, please use curling: %s, %s" % (url, method_name))
        if data and method_name.lower() == "get":
            raise TypeError("You cannot use data in a GET request. " "Maybe you meant to use filters=...")

        data = json.dumps(data, cls=self.encoder or Encoder) if data else json.dumps({})
        method = getattr(requests, method_name)
        # Hoisted so the timeout log below reports the value actually used.
        timeout = self.config.get("timeout", 10)

        try:
            with statsd.timer("solitude.call.%s" % method_name):
                result = method(
                    url, data=data, headers={"content-type": "application/json"}, timeout=timeout
                )
        except requests.ConnectionError:
            log.error("Solitude not accessible")
            raise SolitudeOffline(general_error)
        except requests.Timeout:
            log.error("Solitude timed out, limit %s" % timeout)
            raise SolitudeTimeout(general_error)

        if result.status_code in (200, 201, 202, 204):
            return json.loads(result.text) if result.text else {}
        else:
            log.error("Solitude error with %s: %r" % (url, result.text))
            res = {}
            try:
                res = json.loads(result.text) if result.text else {}
            except ValueError:
                # The error body was not JSON; fall back to an empty dict.
                # (Narrowed from a bare `except:` which also hid real bugs.)
                pass
            code = res.get("error_code", 0)
            raise SolitudeError(lookup(code, res.get("error_data", {})), code=code)
Example #31
0
 def wrapper(*args, **kw):
     """Invoke `func`, wrapped in a statsd timer unless timing is marked
     test-only and we are not running inside the test suite."""
     if test_only and not settings.IN_TEST_SUITE:
         # Timing is test-only and this isn't a test run: call straight
         # through without the timer.
         return func(*args, **kw)
     timer_name = key or '%s.%s' % (func.__module__, func.__name__)
     with statsd.timer('timer.%s' % timer_name):
         return func(*args, **kw)
Example #32
0
def _paypal(request):
    """Process a PayPal IPN (Instant Payment Notification).

    Validates the notification with PayPal, then dispatches each
    transaction it contains to the matching handler (currently only
    'completed' is handled).  Returns an HTTP response; unknown or
    unprocessable notifications are acknowledged and ignored.
    """
    # Must be this way around.
    post, raw = request.POST.copy(), request.read()
    paypal_log.info('IPN received: %s' % raw)

    # Check that the request is valid and coming from PayPal.
    # The order of the params has to match the original request.
    data = u'cmd=_notify-validate&' + raw
    with statsd.timer('paypal.validate-ipn'):
        paypal_response = requests.post(settings.PAYPAL_CGI_URL,
                                        data,
                                        verify=True,
                                        cert=settings.PAYPAL_CERT)

    post, transactions = _parse(post)

    # If paypal doesn't like us, fail.
    if paypal_response.text != 'VERIFIED':
        # Log the response body; this previously interpolated the
        # response object itself, producing its repr instead of the text.
        msg = ("Expecting 'VERIFIED' from PayPal, got '%s'. "
               "Failing." % paypal_response.text)
        _log_error_with_data(msg, post)
        return http.HttpResponseForbidden('Invalid confirmation')

    payment_status = post.get('payment_status', '').lower()
    if payment_status != 'completed':
        paypal_log.info('Payment status not completed: %s, %s' %
                        (post.get('txn_id', ''), payment_status))
        return http.HttpResponse('Ignoring %s' % post.get('txn_id', ''))

    # There could be multiple transactions on the IPN. This will deal
    # with them appropriately or cope if we don't know how to deal with
    # any of them.
    methods = {'completed': paypal_completed}
    result = None
    called = False
    # Ensure that we process 0, then 1 etc.
    for (k, v) in sorted(transactions.items()):
        status = v.get('status', '').lower()
        if status not in methods:
            paypal_log.info('Unknown status: %s' % status)
            continue
        result = methods[status](request, post.get('txn_id'), post,
                                 v.get('amount'))
        called = True
        # Because of chained payments a refund is more than one transaction.
        # But from our point of view, it's actually only one transaction and
        # we can safely ignore the rest.
        if result.content == 'Success!' and status == 'refunded':
            break

    if not called:
        # Whilst the payment status was completed, it contained
        # no transactions with status, which means we don't know
        # how to process it. Hence it's being ignored.
        paypal_log.info('No methods to call on: %s' % post.get('txn_id', ''))
        return http.HttpResponse('Ignoring %s' % post.get('txn_id', ''))

    return result
Example #33
0
def run_yara(results, upload_pk):
    """
    Apply a set of Yara rules on a FileUpload and store the Yara results
    (matches).

    This task is intended to be run as part of the submission process only.
    When a version is created from a FileUpload, the files are removed. In
    addition, we usually delete old FileUpload entries after 180 days.

    - `results` are the validation results passed in the validation chain. This
       task is a validation task, which is why it must receive the validation
       results as first argument.
    - `upload_pk` is the FileUpload ID.

    Always returns `results` unchanged; scanner errors are counted and
    logged but never raised.
    """
    log.info('Starting yara task for FileUpload %s.', upload_pk)

    if not results['metadata']['is_webextension']:
        log.info(
            'Not running yara for FileUpload %s, it is not a '
            'webextension.', upload_pk)
        return results

    upload = FileUpload.objects.get(pk=upload_pk)

    try:
        scanner_result = ScannerResult(upload=upload, scanner=YARA)

        with statsd.timer('devhub.yara'):
            rules = yara.compile(filepath=settings.YARA_RULES_FILEPATH)

            zip_file = SafeZip(source=upload.path)
            for zip_info in zip_file.info_list:
                if not zip_info.is_dir():
                    # Decode leniently: matching is best-effort and file
                    # contents are not guaranteed to be valid UTF-8.
                    file_content = zip_file.read(zip_info).decode(
                        errors='ignore')
                    for match in rules.match(data=file_content):
                        # Add the filename to the meta dict.
                        meta = {**match.meta, 'filename': zip_info.filename}
                        scanner_result.add_yara_result(rule=match.rule,
                                                       tags=match.tags,
                                                       meta=meta)
            zip_file.close()

        scanner_result.save()

        if scanner_result.has_matches:
            statsd.incr('devhub.yara.has_matches')

        statsd.incr('devhub.yara.success')
        log.info('Ending scanner "yara" task for FileUpload %s.', upload_pk)
    except Exception:
        statsd.incr('devhub.yara.failure')
        # We log the exception but we do not raise to avoid perturbing the
        # submission flow.
        log.exception('Error in scanner "yara" task for FileUpload %s.',
                      upload_pk)

    return results
Example #34
0
def collect_tweets():
    """Collect new tweets about Firefox and store them in the DB.

    Does nothing on dev/stage so we don't (ab)use the twitter API from
    there.  (This docstring was previously a stray string literal placed
    after the STAGE guard, where it was a no-op statement rather than
    the function's docstring.)
    """
    # Don't (ab)use the twitter API from dev and stage.
    if settings.STAGE:
        return
    with statsd.timer('customercare.tweets.time_elapsed'):
        t = get_twitter_api()

        search_options = {
            'q': ('firefox OR #fxinput OR @firefoxbrasil OR #firefoxos '
                  'OR @firefox_es'),
            'count':
            settings.CC_TWEETS_PERPAGE,  # Items per page.
            'result_type':
            'recent',  # Retrieve tweets by date.
        }

        # If we already have some tweets, collect nothing older than what we
        # have.
        try:
            latest_tweet = Tweet.latest()
        except Tweet.DoesNotExist:
            log.debug('No existing tweets. Retrieving %d tweets from search.' %
                      settings.CC_TWEETS_PERPAGE)
        else:
            search_options['since_id'] = latest_tweet.tweet_id
            log.info('Retrieving tweets with id >= %s' % latest_tweet.tweet_id)

        # Retrieve Tweets
        results = t.search(**search_options)

        if len(results['statuses']) == 0:
            # Twitter returned 0 results.
            return

        # Drop tweets into DB
        for item in results['statuses']:
            # Apply filters to tweet before saving
            # Allow links in #fxinput tweets
            statsd.incr('customercare.tweet.collected')
            item = _filter_tweet(item, allow_links='#fxinput' in item['text'])
            if not item:
                continue

            created_date = datetime.utcfromtimestamp(
                calendar.timegm(rfc822.parsedate(item['created_at'])))

            item_lang = item['metadata'].get('iso_language_code', 'en')

            tweet = Tweet(tweet_id=item['id'],
                          raw_json=json.dumps(item),
                          locale=item_lang,
                          created=created_date)
            try:
                tweet.save()
                statsd.incr('customercare.tweet.saved')
            except IntegrityError:
                # The tweet is already stored; skip the duplicate.
                pass
Example #35
0
def run_scanner(results, upload_pk, scanner, api_url, api_key):
    """
    Run a scanner on a FileUpload via RPC and store the results.

    - `results` are the validation results passed in the validation chain. This
       task is a validation task, which is why it must receive the validation
       results as first argument.
    - `upload_pk` is the FileUpload ID.
    - `scanner` is the scanner constant (looked up in SCANNERS).
    - `api_url`/`api_key` locate and authenticate against the remote
      scanner service.

    Always returns `results` unchanged; scanner errors are counted and
    logged but never raised.
    """
    scanner_name = SCANNERS.get(scanner)
    log.info('Starting scanner "%s" task for FileUpload %s.', scanner_name,
             upload_pk)

    if not results['metadata']['is_webextension']:
        log.info('Not running scanner "%s" for FileUpload %s, it is not a '
                 'webextension.', scanner_name, upload_pk)
        return results

    upload = FileUpload.objects.get(pk=upload_pk)

    try:
        if not os.path.exists(upload.path):
            raise ValueError('File "{}" does not exist.'.format(upload.path))

        scanner_result = ScannerResult(upload=upload, scanner=scanner)

        with statsd.timer('devhub.{}'.format(scanner_name)):
            # The remote scanner fetches the file itself via the
            # authenticated URL; we only send it the key and the URL.
            json_payload = {
                'api_key': api_key,
                'download_url': upload.get_authenticated_download_url(),
            }
            response = requests.post(url=api_url,
                                     json=json_payload,
                                     timeout=settings.SCANNER_TIMEOUT)

        try:
            data = response.json()
        except ValueError:
            # Log the response body when JSON decoding has failed.
            raise ValueError(response.text)

        if response.status_code != 200 or 'error' in data:
            raise ValueError(data)

        scanner_result.results = data
        scanner_result.save()

        statsd.incr('devhub.{}.success'.format(scanner_name))
        log.info('Ending scanner "%s" task for FileUpload %s.', scanner_name,
                 upload_pk)
    except Exception:
        statsd.incr('devhub.{}.failure'.format(scanner_name))
        # We log the exception but we do not raise to avoid perturbing the
        # submission flow.
        log.exception('Error in scanner "%s" task for FileUpload %s.',
                      scanner_name, upload_pk)

    return results
Example #36
0
def run_validator(file_path,
                  for_appversions=None,
                  test_all_tiers=False,
                  overrides=None):
    """A pre-configured wrapper around the addon validator.

    *file_path*
        Path to addon / extension file to validate.

    *for_appversions=None*
        An optional dict of application versions to validate this addon
        for. The key is an application GUID and its value is a list of
        versions.

    *test_all_tiers=False*
        When False (default) the validator will not continue if it
        encounters fatal errors.  When True, all tests in all tiers are run.
        See bug 615426 for discussion on this default.

    *overrides=None*
        Normally the validator gets info from install.rdf but there are a
        few things we need to override. See validator for supported overrides.
        Example: {'targetapp_maxVersion': {'<app guid>': '<version>'}}

    To validate the addon for compatibility with Firefox 5 and 6,
    you'd pass in::

        for_appversions={amo.FIREFOX.guid: ['5.0.*', '6.0.*']}

    Not all application versions will have a set of registered
    compatibility tests.
    """

    from validator.validate import validate

    # Point the validator's JS testcases at our spidermonkey binary.
    # TODO(Kumar) remove this when validator is fixed, see bug 620503
    from validator.testcases import scripting
    scripting.SPIDERMONKEY_INSTALLATION = settings.SPIDERMONKEY
    import validator.constants
    validator.constants.SPIDERMONKEY_INSTALLATION = settings.SPIDERMONKEY

    # Make sure the approved-applications JSON the validator needs has
    # been dumped; generate it on first use.
    apps = dump_apps.Command.JSON_PATH
    if not os.path.exists(apps):
        call_command('dump_apps')

    with statsd.timer('devhub.validator'):
        return validate(
            file_path,
            for_appversions=for_appversions,
            format='json',
            # When False, this flag says to stop testing after one
            # tier fails.
            determined=test_all_tiers,
            approved_applications=apps,
            spidermonkey=settings.SPIDERMONKEY,
            overrides=overrides,
            timeout=settings.VALIDATOR_TIMEOUT)
Example #37
0
def run_validator(file_path):
    """A pre-configured wrapper around the app validator.

    Packaged apps (zip files) go through `validate_packaged_app`; hosted
    apps have their manifest contents run through `validate_app`.
    """
    with statsd.timer('mkt.developers.validator'):
        if zipfile.is_zipfile(file_path):
            # Packaged app: validate the zip file itself.
            log.info(u'Running `validate_packaged_app` for path: %s'
                     % (file_path))
            with statsd.timer('mkt.developers.validate_packaged_app'):
                return validate_packaged_app(
                    file_path,
                    market_urls=settings.VALIDATOR_IAF_URLS,
                    timeout=settings.VALIDATOR_TIMEOUT,
                    spidermonkey=settings.SPIDERMONKEY)
        # Hosted app: validate the manifest contents.
        log.info(u'Running `validate_app` for path: %s' % (file_path))
        with statsd.timer('mkt.developers.validate_app'):
            return validate_app(storage.open(file_path).read(),
                                market_urls=settings.VALIDATOR_IAF_URLS)
Example #38
0
def run_scanner(results, upload_pk, scanner, api_url, api_key):
    """
    Run a scanner on a FileUpload via RPC and store the results.

    - `results` are the validation results passed in the validation chain. This
       task is a validation task, which is why it must receive the validation
       results as first argument.
    - `upload_pk` is the FileUpload ID.
    - `scanner` is the scanner constant (looked up in SCANNERS).
    - `api_url`/`api_key` locate and authenticate against the remote
      scanner service.

    Returns `results` unchanged.  Scanner errors are counted and logged;
    they are re-raised unless the 'ignore-exceptions-in-scanner-tasks'
    waffle switch is active.
    """
    scanner_name = SCANNERS.get(scanner)
    log.info('Starting scanner "%s" task for FileUpload %s.', scanner_name,
             upload_pk)

    if not results['metadata']['is_webextension']:
        log.info(
            'Not running scanner "%s" for FileUpload %s, it is not a webextension.',
            scanner_name,
            upload_pk,
        )
        return results

    upload = FileUpload.objects.get(pk=upload_pk)

    try:
        if not os.path.exists(upload.path):
            raise ValueError('File "{}" does not exist.'.format(upload.path))

        scanner_result = ScannerResult(upload=upload, scanner=scanner)

        with statsd.timer('devhub.{}'.format(scanner_name)):
            _run_scanner_for_url(
                scanner_result,
                upload.get_authenticated_download_url(),
                scanner,
                api_url,
                api_key,
            )

        scanner_result.save()

        # Per-rule match counters feed the scanner monitoring dashboards.
        if scanner_result.has_matches:
            statsd.incr('devhub.{}.has_matches'.format(scanner_name))
            for scanner_rule in scanner_result.matched_rules.all():
                statsd.incr('devhub.{}.rule.{}.match'.format(
                    scanner_name, scanner_rule.id))

        statsd.incr('devhub.{}.success'.format(scanner_name))
        log.info('Ending scanner "%s" task for FileUpload %s.', scanner_name,
                 upload_pk)
    except Exception as exc:
        statsd.incr('devhub.{}.failure'.format(scanner_name))
        log.exception('Error in scanner "%s" task for FileUpload %s.',
                      scanner_name, upload_pk)
        # The switch lets operations keep submissions flowing even when a
        # scanner back-end is misbehaving.
        if not waffle.switch_is_active('ignore-exceptions-in-scanner-tasks'):
            raise exc

    return results
Example #39
0
def sign(version):
    """Sign every signable file of `version`.

    Raises SigningError when the version has no files at all.
    """
    if not version.all_files:
        log.error('Attempt to sign version %s with no files.' % version.pk)
        raise SigningError('No file')

    log.info('Signing version: %s' % version.pk)

    signable_files = (f for f in version.all_files if f.can_be_signed())
    for file_obj in signable_files:
        with statsd.timer('services.sign.addon'):
            sign_file(file_obj)
Example #40
0
def _fetch_content(url):
    """Fetch `url` (30s timeout) and return the open response object.

    Wraps urllib2 errors in a generic Exception carrying a translated,
    user-facing message.  (Python 2 `except X, e` syntax.)
    """
    with statsd.timer('developers.tasks.fetch_content'):
        try:
            return urllib2.urlopen(url, timeout=30)
        except urllib2.HTTPError, e:
            raise Exception(
                _('%s responded with %s (%s).') % (url, e.code, e.msg))
        except urllib2.URLError, e:
            # Unpack the URLError to try and find a useful message.
            raise Exception(_('The file could not be retrieved.'))
Example #41
0
    def http_do(self, verb, path, headers, body):
        """Route this Braintree API call through the solitude auth proxy,
        timing the call and counting the response status in statsd."""
        env = self.environment
        # Record the real destination so solitude-auth can forward there...
        headers['x-solitude-service'] = env._real.base_url + path
        # ...while the request itself is sent to the auth server.
        auth_path = env._url.path

        with statsd.timer('solitude.braintree.api'):
            status, text = super(Http, self).http_do(
                verb, auth_path, headers, body)
        statsd.incr('solitude.braintree.response.{0}'.format(status))
        return status, text
Example #42
0
def autograph_sign_data(file_obj):
    """Sign `file_obj` via autographs /sign/data endpoint.

    .. note::

        This endpoint usage is being replaced by `autograph_sign_file`.

    :returns: The certificates serial number.
    :raises SigningError: when autograph does not answer 201 Created.
    """
    conf = settings.AUTOGRAPH_CONFIG

    jar = JarExtractor(path=storage.open(file_obj.current_file_path))

    log.debug(u'File signature contents: {0}'.format(jar.signatures))

    signed_manifest = six.text_type(jar.signatures)

    # Autograph expects base64 input and a key id configured server-side.
    signing_request = [{
        'input':
        force_text(b64encode(force_bytes(signed_manifest))),
        'keyid':
        conf['signer'],
        'options': {
            'id': get_id(file_obj.version.addon),
        },
    }]

    with statsd.timer('services.sign.addon.autograph'):
        response = requests.post(
            '{server}/sign/data'.format(server=conf['server_url']),
            json=signing_request,
            auth=HawkAuth(id=conf['user_id'], key=conf['key']))

    if response.status_code != requests.codes.CREATED:
        msg = u'Posting to add-on signing failed: {0} {1}'.format(
            response.reason, response.text)
        log.error(msg)
        raise SigningError(msg)

    # convert the base64 encoded pkcs7 signature back to binary
    pkcs7 = b64decode(force_bytes(response.json()[0]['signature']))

    cert_serial_num = get_signer_serial_number(pkcs7)

    # We only want the (unique) temporary file name.
    # NOTE(review): the file is deleted when this `with` exits, so there
    # is a small window in which another process could claim the same
    # name before make_signed() recreates it — TODO confirm acceptable.
    with tempfile.NamedTemporaryFile(dir=settings.TMP_PATH) as temp_file:
        temp_filename = temp_file.name

    jar.make_signed(signed_manifest=signed_manifest,
                    signature=pkcs7,
                    sigpath=u'mozilla',
                    outpath=temp_filename)
    # Replace the original package with the signed one.
    shutil.move(temp_filename, file_obj.current_file_path)

    return cert_serial_num
Example #43
0
def receipt_check(environ):
    """Verify the receipt POSTed in the WSGI request body.

    Returns an ``(http_status, body)`` tuple: 200 with the JSON check
    result, or 500 with an empty body when verification fails.
    """
    with statsd.timer('services.verify'):
        data = environ['wsgi.input'].read()
        try:
            verify = Verify(data, environ)
            return 200, json.dumps(verify.check_full())
        except Exception:
            # Narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt still propagate; any verification error
            # is logged and reported as a 500.
            log_exception('<none>')
            return 500, ''
    # (The original ended with an unreachable `return output` after a
    # try block in which every path returns; that dead code is removed.)
Example #44
0
    def _get(self, url):
        """GET `url` through the Boku proxy, timing the call in statsd."""
        # Keep only path/params/query/fragment: the scheme and host are
        # replaced by the proxy's.
        relative = urlunparse(('', '') + urlparse(url)[2:])
        # url_join takes care of missing or extra / in urls, but we must
        # strip the leading / off ourselves.
        proxy_url = url_join(settings.BOKU_PROXY, 'boku', relative[1:])

        # Now continue as normal, call the proxy.
        log.info('Boku proxy client call: {url}'.format(url=proxy_url))
        with statsd.timer('solitude.boku.api'):
            return requests.get(proxy_url)
Example #45
0
def call_signing(file_obj):
    """Sign `file_obj` via autographs /sign/file endpoint.

    :returns: The certificates serial number.
    :raises SigningError: when autograph does not answer 201 Created.
    """
    conf = settings.AUTOGRAPH_CONFIG

    with storage.open(file_obj.current_file_path) as fobj:
        input_data = force_text(b64encode(fobj.read()))

    signing_request = [{
        'input': input_data,
        'keyid': conf['signer'],
        'options': {
            'id': get_id(file_obj.version.addon),
            # "Add-on variant A params (PKCS7 SHA1 and COSE ES256) work in
            # Fx <57, so we can switch to that without breaking backwards
            # compatibility"
            # https://github.com/mozilla/addons-server/issues/9308
            # This means, the pkcs7 sha1 signature is used for backwards
            # compatibility and cose sha256 will be used for newer
            # Firefox versions.
            # The relevant pref in Firefox is
            # "security.signed_app_signatures.policy"
            # where it's set to COSEAndPKCS7WithSHA1OrSHA256 to match
            # these settings.
            'pkcs7_digest': 'SHA1',
            'cose_algorithms': ['ES256']
        },
    }]

    with statsd.timer('services.sign.addon.autograph'):
        response = requests.post(
            '{server}/sign/file'.format(server=conf['server_url']),
            json=signing_request,
            auth=HawkAuth(id=conf['user_id'], key=conf['key']))

    if response.status_code != requests.codes.CREATED:
        msg = u'Posting to add-on signing failed: {0} {1}'.format(
            response.reason, response.text)
        log.error(msg)
        raise SigningError(msg)

    # Save the returned file in our storage.
    with storage.open(file_obj.current_file_path, 'wb') as fobj:
        fobj.write(b64decode(response.json()[0]['signed_file']))

    # Now fetch the certificates serial number. Future versions of
    # autograph may return this in the response.
    # https://github.com/mozilla-services/autograph/issues/214
    # Now extract the file and fetch the pkcs signature.
    # Zip member names always use '/' as separator, so don't build the
    # name with os.path.join (which yields '\\' on Windows).
    with zipfile.ZipFile(file_obj.current_file_path, mode='r') as zip_fobj:
        return get_signer_serial_number(
            zip_fobj.read('META-INF/mozilla.rsa'))
Example #46
0
 def call(api, data):
     """Call the PayPal permissions API `api` with `data`, timing the
     request; log and re-raise any PaypalError.

     (Python 2 `except X, e` syntax.  NOTE(review): `r` is assigned but
     never returned here — confirm the enclosing code reads it.)
     """
     try:
         with statsd.timer('paypal.get.personal'):
             r = _call(settings.PAYPAL_PERMISSIONS_URL + api,
                       data,
                       token=token)
     except PaypalError, error:
         # The adjacent literals are concatenated; the trailing space was
         # missing before, producing "personaldata" in the log output.
         paypal_log.debug('Paypal returned an error when getting personal '
                          'data for token: %s... error: %s' %
                          (token[:10], error))
         raise
Example #47
0
def get_permissions_token(request_token, verification_code):
    """
    Send request for permissions token, after user has granted the
    requested permissions via the PayPal page we redirected them to.

    Returns the token and secret URL-encoded as a single query string.
    """
    with statsd.timer('paypal.permissions.token'):
        response = _call(settings.PAYPAL_PERMISSIONS_URL + 'GetAccessToken',
                         {'token': request_token,
                          'verifier': verification_code})
    token_pair = {'token': response['token'],
                  'secret': response['tokenSecret']}
    return urllib.urlencode(token_pair)
Example #48
0
def auto_import_blocklist():
    """Run the `import_blocklist` management command, guarded by a
    waffle switch, reporting success/failure to statsd."""
    if not waffle.switch_is_active('blocklist_auto_import'):
        log.info('Automatic import_blocklist cron job disabled.')
        return
    with statsd.timer('blocklist.cron.import_blocklist'):
        try:
            call_command('import_blocklist')
        except CommandError:
            statsd.incr('blocklist.cron.import_blocklist.failure')
            raise
    statsd.incr('blocklist.cron.import_blocklist.success')
Example #49
0
def sign(version_id, reviewer=False, resign=False, **kw):
    """Sign the packaged-app file of `version_id` and return the path of
    the signed package.

    - `reviewer`: sign for reviewer install (unique app id, stored on
      private storage) instead of for the public.
    - `resign`: sign again even when a signed package already exists.

    Raises SigningError when the app is not packaged, the version has no
    files, or signing itself fails.
    """
    version = Version.objects.get(pk=version_id)
    app = version.addon
    log.info('Signing version: %s of app: %s' % (version_id, app))

    if not app.is_packaged:
        log.error('[Webapp:%s] Attempt to sign a non-packaged app.' % app.id)
        raise SigningError('Not packaged')

    try:
        file_obj = version.all_files[0]
    except IndexError:
        log.error(
            '[Webapp:%s] Attempt to sign an app with no files in version.' %
            app.id)
        raise SigningError('No file')

    path = (file_obj.signed_reviewer_file_path if reviewer else
            file_obj.signed_file_path)

    # Reviewer-signed packages live on private storage; public ones on
    # public storage.
    storage = private_storage if reviewer else public_storage

    if storage.exists(path) and not resign:
        log.info('[Webapp:%s] Already signed app exists.' % app.id)
        return path

    if reviewer:
        # Reviewers get a unique 'id' so the reviewer installed app won't
        # conflict with the public app, and also so multiple versions of the
        # same app won't conflict with themselves.
        ids = json.dumps({
            'id': 'reviewer-{guid}-{version_id}'.format(guid=app.guid,
                                                        version_id=version_id),
            'version': version_id
        })
    else:
        ids = json.dumps({
            'id': app.guid,
            'version': version_id
        })
    with statsd.timer('services.sign.app'):
        try:
            # Signing starts with the original packaged app file which is
            # always on private storage.
            sign_app(private_storage.open(file_obj.file_path), path, ids,
                     reviewer)
        except SigningError:
            # Don't leave a partially written signed package behind.
            log.info('[Webapp:%s] Signing failed' % app.id)
            if storage.exists(path):
                storage.delete(path)
            raise
    log.info('[Webapp:%s] Signing complete.' % app.id)
    return path
Example #50
0
            def timed(self, *args, **kwargs):
                """Call `orig`, timing only one call in every
                SAMPLE_RATE to keep statsd traffic down."""
                global MEMCACHED_COUNT

                MEMCACHED_COUNT += 1
                if MEMCACHED_COUNT < settings.SAMPLE_RATE:
                    # Not this call's turn to be sampled.
                    return orig(self, *args, **kwargs)

                MEMCACHED_COUNT = 0
                key = 'memcached.{}.{}'.format(get_view_name(),
                                               orig.__name__)
                with statsd.timer(key):
                    return orig(self, *args, **kwargs)
Example #51
0
def run_scanner(upload_pk, scanner, api_url, api_key):
    """
    Run a scanner on a FileUpload via RPC and store the results.

    Never raises: any failure is logged and counted in statsd so the
    submission flow is not perturbed.
    """
    scanner_name = SCANNERS.get(scanner)
    log.info('Starting scanner "%s" task for FileUpload %s.', scanner_name,
             upload_pk)

    upload = FileUpload.objects.get(pk=upload_pk)

    # Scanners only operate on xpi files; skip everything else.
    if not upload.path.endswith('.xpi'):
        log.info('Not running scanner "%s" for FileUpload %s, it is not a xpi '
                 'file.', scanner_name, upload_pk)
        return

    try:
        if not os.path.exists(upload.path):
            raise ValueError('File "{}" does not exist.'.format(upload.path))

        scanner_result = ScannersResult()
        scanner_result.upload = upload
        scanner_result.scanner = scanner

        # Time only the remote call itself.
        with statsd.timer('devhub.{}'.format(scanner_name)):
            response = requests.post(
                url=api_url,
                json={
                    'api_key': api_key,
                    'download_url': upload.get_authenticated_download_url(),
                },
                timeout=settings.SCANNER_TIMEOUT)

        try:
            results = response.json()
        except ValueError:
            # Log the response body when JSON decoding has failed.
            raise ValueError(response.text)

        if 'error' in results:
            raise ValueError(results)

        scanner_result.results = results
        scanner_result.save()

        statsd.incr('devhub.{}.success'.format(scanner_name))
        log.info('Ending scanner "%s" task for FileUpload %s.', scanner_name,
                 upload_pk)
    except Exception:
        statsd.incr('devhub.{}.failure'.format(scanner_name))
        # We log the exception but we do not raise to avoid perturbing the
        # submission flow.
        log.exception('Error in scanner "%s" task for FileUpload %s.',
                      scanner_name, upload_pk)
Example #52
0
def run_addons_linter(path, channel):
    """Run the addons-linter binary on `path` and return its JSON output.

    Arguments:
        path -- file or directory to lint; must exist.
        channel -- release channel; unlisted add-ons are linted with
                   `--self-hosted`.

    Returns the linter JSON output (as a string) after normalizing it with
    `fix_addons_linter_output` and recording validation stats.

    Raises ValueError if `path` does not exist or if the linter wrote
    anything to stderr.
    """
    from .utils import fix_addons_linter_output

    args = [settings.ADDONS_LINTER_BIN, path, '--boring', '--output=json']

    if channel == amo.RELEASE_CHANNEL_UNLISTED:
        args.append('--self-hosted')

    if waffle.switch_is_active('disable-linter-xpi-autoclose'):
        args.append('--disable-xpi-autoclose')

    if waffle.switch_is_active('enable-mv3-submissions'):
        args.append('--max-manifest-version=3')
    else:
        args.append('--max-manifest-version=2')

    if not os.path.exists(path):
        raise ValueError(
            'Path "{}" is not a file or directory or does not exist.'.format(
                path))

    # Use context managers so the temporary files are always closed, even
    # if Popen()/wait() raises — the previous code only closed them on the
    # success path, leaking descriptors on error.
    with tempfile.TemporaryFile() as stdout, \
            tempfile.TemporaryFile() as stderr:
        with statsd.timer('devhub.linter'):
            process = subprocess.Popen(
                args,
                stdout=stdout,
                stderr=stderr,
                # default but explicitly set to make sure we don't open a
                # shell.
                shell=False,
            )

            process.wait()

            stdout.seek(0)
            stderr.seek(0)

            output, error = stdout.read(), stderr.read()

    if error:
        raise ValueError(error)

    parsed_data = json.loads(force_str(output))

    result = json.dumps(fix_addons_linter_output(parsed_data, channel))
    track_validation_stats(result)

    return result
Example #53
0
            def timed_execute(self, *args, **kwargs):
                """Proxy to ``orig``, instrumenting a sampled subset of calls.

                Every ``settings.SAMPLE_RATE``-th invocation bumps a counter
                metric and is wrapped in a statsd timer keyed on the current
                view; the rest call through uninstrumented.
                """
                global CASSANDRA_COUNT

                CASSANDRA_COUNT += 1
                if CASSANDRA_COUNT < settings.SAMPLE_RATE:
                    # Unsampled call: no instrumentation overhead.
                    return orig(self, *args, **kwargs)
                # Sampled call: reset the counter, count and time it.
                CASSANDRA_COUNT = 0
                metric = 'cassandra.{}.execute'.format(get_view_name())
                statsd.incr(metric)
                with statsd.timer(metric):
                    return orig(self, *args, **kwargs)
Example #54
0
def application(environ, start_response):
    """WSGI entry point for the pfs (plugin finder) service.

    Parses the query string, renders the pfs output and returns it as a
    single-element body list. Errors are logged together with the parsed
    request data and re-raised so the WSGI layer produces a 500.
    """
    status = '200 OK'

    with statsd.timer('services.pfs'):
        data = dict(parse_qsl(environ['QUERY_STRING']))
        try:
            output = get_output(data).encode('utf-8')
            start_response(status, get_headers(len(output)))
        except Exception:
            # Narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt are no longer routed through the error
            # logger; they still propagate.
            log_exception(data)
            raise
        return [output]
Example #55
0
 def raw(self):
     """Run the assembled query against Elasticsearch and return the raw
     response, recording both client-side and server-side timings."""
     query = self._build_query()
     es = get_es()
     try:
         with statsd.timer('search.es.timer') as timer:
             hits = es.search(query, self.index, self.type._meta.db_table)
     except Exception:
         # Log the query that failed before propagating the error.
         log.error(query)
         raise
     # hits['took'] is ES's own processing time; timer.ms is the full
     # round trip as seen from this client.
     statsd.timing('search.es.took', hits['took'])
     log.debug('[%s] [%s] %s' % (hits['took'], timer.ms, query))
     return hits
Example #56
0
def wiki_to_html(wiki_markup, locale=settings.WIKI_DEFAULT_LANGUAGE,
                 doc_id=None, parser_cls=None):
    """Wiki Markup -> HTML with the wiki app's enhanced parser"""
    # Fall back to the default WikiParser unless a parser class was given.
    parser_factory = WikiParser if parser_cls is None else parser_cls

    with statsd.timer('wiki.render'):
        # Render under the requested locale so localized strings (e.g. the
        # TOC heading) come out in the right language.
        with uselocale(locale):
            parser = parser_factory(doc_id=doc_id)
            content = parser.parse(wiki_markup, show_toc=False, locale=locale,
                                   toc_string=_('Table of Contents'))
    return content
Example #57
0
 def post(self, url, data):
     """POST a status payload to the GitHub API, timed per state.

     Raises requests.HTTPError (via raise_for_status) on a non-2xx reply.
     """
     state = data.get('state', 'comment')
     log.info('Setting github to: {} at: {}'.format(state, url))
     with statsd.timer('github.{}'.format(state)):
         data['context'] = 'mozilla/addons-linter'
         log.info('Body: {}'.format(data))
         response = requests.post(url,
                                  json=data,
                                  auth=(settings.GITHUB_API_USER,
                                        settings.GITHUB_API_TOKEN))
         log.info('Response: {}'.format(response.content))
         response.raise_for_status()
Example #58
0
def run_addons_linter(path, listed=True):
    """Run the addons-linter binary on `path` and return its JSON output.

    Arguments:
        path -- file or directory to lint; must exist.
        listed -- when False, lint with `--self-hosted` (unlisted add-on).

    Returns the linter JSON output (as a string) after normalizing it with
    `fix_addons_linter_output` and recording validation stats.

    Raises ValueError if `path` does not exist or if the linter wrote
    anything to stderr.
    """
    from .utils import fix_addons_linter_output

    args = [
        settings.ADDONS_LINTER_BIN,
        path,
        '--boring',
        '--output=json'
    ]

    if not listed:
        args.append('--self-hosted')

    if not os.path.exists(path):
        raise ValueError(
            'Path "{}" is not a file or directory or does not exist.'
            .format(path))

    # Use context managers so the temporary files are always closed, even
    # if Popen()/wait() raises — the previous code only closed them on the
    # success path, leaking descriptors on error.
    with tempfile.TemporaryFile() as stdout, \
            tempfile.TemporaryFile() as stderr:
        with statsd.timer('devhub.linter'):
            process = subprocess.Popen(
                args,
                stdout=stdout,
                stderr=stderr,
                # default but explicitly set to make sure we don't open a
                # shell.
                shell=False
            )

            process.wait()

            stdout.seek(0)
            stderr.seek(0)

            output, error = stdout.read(), stderr.read()

    if error:
        raise ValueError(error)

    parsed_data = json.loads(output)

    result = json.dumps(fix_addons_linter_output(parsed_data, listed))
    track_validation_stats(result, addons_linter=True)

    return result
Example #59
0
def application(environ, start_response):
    """WSGI entry point for the update (version check) service.

    Builds an Update from the query parameters and returns its rendered
    output as a single-element body list; errors are logged and re-raised.
    """
    status = '200 OK'
    with statsd.timer('services.update'):
        params = dict(parse_qsl(environ['QUERY_STRING']))
        compat_mode = params.pop('compatMode', 'strict')
        try:
            update = Update(params, compat_mode)
            output = force_bytes(update.get_output())
            start_response(status, update.get_headers(len(output)))
        except Exception as exc:
            log.exception(exc)
            raise
    return [output]
Example #60
0
def fxa_identify(code, config=None):
    """Get an FxA profile for an access token. If identification fails an
    IdentificationError is raised.

    Increments `accounts.fxa.identify.all.success` or `.fail` in statsd
    depending on the outcome; failures are re-raised to the caller.
    """
    try:
        with statsd.timer('accounts.fxa.identify.all'):
            token = get_fxa_token(code, config)['access_token']
            profile = get_fxa_profile(token, config)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are not counted as identification failures;
        # they still propagate.
        statsd.incr('accounts.fxa.identify.all.fail')
        raise
    else:
        statsd.incr('accounts.fxa.identify.all.success')
        return profile