Example #1
def wrapper(task, akismet_results, id_or_path, **kwargs):
    # This is necessary to prevent timeout exceptions from being set
    # as our result, and replacing the partial validation results we'd
    # prefer to return.
    task.ignore_result = True
    try:
        data = fn(id_or_path, **kwargs)
        results = json.loads(force_text(data))
        if akismet_results:
            annotations.annotate_akismet_spam_check(
                results, akismet_results)
        return results
    except UnsupportedFileType as exc:
        results = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
        annotations.insert_validation_message(
            results,
            type_='error',
            message=exc.message,
            msg_id='unsupported_filetype',
            compatibility_type=None)
        return results
    except Exception as exc:
        log.exception('Unhandled error during validation: %r' % exc)
        return deepcopy(amo.VALIDATOR_SKELETON_EXCEPTION_WEBEXT)
    finally:
        # But we do want to return a result after that exception has
        # been handled.
        task.ignore_result = False
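
In this excerpt, fn is not defined: wrapper closes over the function being decorated, and task is the bound Celery task instance. A minimal sketch of what the enclosing decorator might look like; the name validation_task, the import path, and the VALIDATOR_TIMEOUT setting are assumptions for illustration, not necessarily the exact addons-server API:

import functools

from django.conf import settings

from olympia.amo.celery import task  # assumed import path


def validation_task(fn):
    # Hypothetical reconstruction of the enclosing decorator. bind=True
    # makes Celery pass the task instance as the first argument, and the
    # soft time limit is what makes the ignore_result dance above necessary.
    @task(bind=True, ignore_result=False,
          soft_time_limit=settings.VALIDATOR_TIMEOUT)  # assumed setting
    @functools.wraps(fn)
    def wrapper(task, akismet_results, id_or_path, **kwargs):
        ...  # body exactly as shown in Example #1
    return wrapper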
Example #2
    def wrapper(task, akismet_results, id_, hash_, *args, **kw):
        # This is necessary to prevent timeout exceptions from being set
        # as our result, and replacing the partial validation results we'd
        # prefer to return.
        task.ignore_result = True
        try:
            data = fn(id_, hash_, *args, **kw)
            result = json.loads(data)
            if akismet_results:
                annotations.annotate_akismet_spam_check(
                    result, akismet_results)
            return result
        except Exception as e:
            log.exception('Unhandled error during validation: %r' % e)

            is_webextension = kw.get('is_webextension', False)
            if is_webextension:
                return deepcopy(amo.VALIDATOR_SKELETON_EXCEPTION_WEBEXT)
            return deepcopy(amo.VALIDATOR_SKELETON_EXCEPTION)
        finally:
            # But we do want to return a result after that exception has
            # been handled.
            task.ignore_result = False
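
This variant differs from Example #1 mainly in the fallback it returns on an unexpected failure: the webextension skeleton is used only when the is_webextension keyword argument is set, and the generic VALIDATOR_SKELETON_EXCEPTION otherwise. The task.ignore_result toggle serves the same purpose as before: a timeout raised mid-validation must not replace the partial results the wrapper would rather return.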
Example #3
def handle_upload_validation_result(
        results, upload_pk, channel, is_mozilla_signed):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)

    # Check for API keys in submissions.
    # Make sure it is extension-like, e.g. no search plugin
    try:
        results = check_for_api_keys_in_file(results=results, upload=upload)
    except (ValidationError, BadZipfile, IOError):
        pass

    # Annotate results with akismet reports results if there are any.
    reports = AkismetReport.objects.filter(upload_instance=upload)
    akismet_results = [
        (report.comment_type, report.result) for report in reports]
    annotations.annotate_akismet_spam_check(results, akismet_results)

    # Annotate results with potential webext warnings on new versions.
    if upload.addon_id and upload.version:
        annotations.annotate_webext_incompatibilities(
            results=results, file_=None, addon=upload.addon,
            version_string=upload.version, channel=channel)

    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file maybe gets moved but it needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'
             .format(delta=delta, upload=upload.pk,
                     created=upload.created, now=now,
                     scaled=scaled_delta, size_in_mb=size_in_mb))
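
To make the per-MB scaling at the end of this function concrete, here is a standalone sketch of the same arithmetic with made-up values:

from decimal import Decimal

delta = 12000                    # 12 s of processing time, in ms (made up)
size = Decimal(5 * 1024 * 1024)  # a 5 MB upload (made up)
megabyte = Decimal(1024 * 1024)

size_in_mb = size / megabyte                          # Decimal('5')
unit = size_in_mb if size > megabyte else Decimal(1)  # larger than 1 MB
scaled_delta = Decimal(delta) / unit                  # Decimal('2400') ms/MB

# An upload under 1 MB is not scaled, so validator setup time does not
# inflate the per-MB metric for tiny packages:
small = Decimal(200 * 1024)
unit = (small / megabyte) if small > megabyte else Decimal(1)
assert Decimal(delta) / unit == Decimal(delta)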
Example #4
def handle_upload_validation_result(
        results, upload_pk, channel, is_mozilla_signed):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)

    # Check for API keys in submissions.
    # Make sure it is extension-like, e.g. no LWT or search plugin
    try:
        results = check_for_api_keys_in_file(results=results, upload=upload)
    except (ValidationError, BadZipfile, IOError):
        pass

    # Annotate results with akismet reports results if there are any.
    reports = AkismetReport.objects.filter(upload_instance=upload)
    akismet_results = [
        (report.comment_type, report.result) for report in reports]
    annotations.annotate_akismet_spam_check(results, akismet_results)

    # Annotate results with potential webext warnings on new versions.
    if upload.addon_id and upload.version:
        annotations.annotate_webext_incompatibilities(
            results=results, file_=None, addon=upload.addon,
            version_string=upload.version, channel=channel)

    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file maybe gets moved but it needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'
             .format(delta=delta, upload=upload.pk,
                     created=upload.created, now=now,
                     scaled=scaled_delta, size_in_mb=size_in_mb))
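
This version is nearly identical to Example #3; the only difference is the comment above the API-key check, which in this revision also excludes LWT (lightweight theme) submissions.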
Example #5
def handle_upload_validation_result(
        results, upload_pk, channel, is_mozilla_signed):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)

    if waffle.switch_is_active('enable-yara') and results['errors'] == 0:
        # Run Yara. This cannot be asynchronous because we have no way to know
        # whether the task will complete before we attach a `Version` to it
        # later in the submission process... Because we cannot use `chord`
        # reliably right now (requires Celery 4.2+), this task is actually not
        # run as a task, it's a simple function call.
        #
        # TODO: use `run_yara` as a task in the submission chord once it is
        # possible. See: https://github.com/mozilla/addons-server/issues/12216
        run_yara(upload.pk)

    if waffle.switch_is_active('enable-customs') and results['errors'] == 0:
        # Run customs. This cannot be asynchronous because we have no way to
        # know whether the task will complete before we attach a `Version` to
        # it later in the submission process... Because we cannot use `chord`
        # reliably right now (requires Celery 4.2+), this task is actually not
        # run as a task, it's a simple function call.
        #
        # TODO: use `run_customs` as a task in the submission chord once it is
        # possible. See: https://github.com/mozilla/addons-server/issues/12217
        run_customs(upload.pk)

    if waffle.switch_is_active('enable-wat') and results['errors'] == 0:
        # Run wat. This cannot be asynchronous because we have no way to know
        # whether the task will complete before we attach a `Version` to it
        # later in the submission process... Because we cannot use `chord`
        # reliably right now (requires Celery 4.2+), this task is actually not
        # run as a task, it's a simple function call.
        #
        # TODO: use `run_wat` as a task in the submission chord once it is
        # possible. See: https://github.com/mozilla/addons-server/issues/12224
        run_wat(upload.pk)

    # Check for API keys in submissions.
    # Make sure it is extension-like, e.g. no search plugin
    try:
        results = check_for_api_keys_in_file(results=results, upload=upload)
    except (ValidationError, BadZipfile, IOError):
        pass

    # Annotate results with akismet reports results if there are any.
    reports = AkismetReport.objects.filter(upload_instance=upload)
    akismet_results = [
        (report.comment_type, report.result) for report in reports]
    annotations.annotate_akismet_spam_check(results, akismet_results)

    # Annotate results with potential webext warnings on new versions.
    if upload.addon_id and upload.version:
        annotations.annotate_webext_incompatibilities(
            results=results, file_=None, addon=upload.addon,
            version_string=upload.version, channel=channel)

    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file maybe gets moved but it needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'
             .format(delta=delta, upload=upload.pk,
                     created=upload.created, now=now,
                     scaled=scaled_delta, size_in_mb=size_in_mb))
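
The three TODOs above describe the same refactor: once chord works reliably (Celery 4.2+), run_yara, run_customs and run_wat could fan out in parallel as real tasks instead of blocking calls. A minimal sketch of that shape, assuming the run_* functions are registered Celery tasks; schedule_scanners and finalize_scanning are made-up names for illustration:

import waffle
from celery import chord


def schedule_scanners(upload_pk):
    # Hypothetical sketch, not current addons-server code: collect the
    # enabled scanners, run them in parallel, and resume the submission
    # flow in a single callback once all of them have finished.
    scanners = []
    if waffle.switch_is_active('enable-yara'):
        scanners.append(run_yara.si(upload_pk))
    if waffle.switch_is_active('enable-customs'):
        scanners.append(run_customs.si(upload_pk))
    if waffle.switch_is_active('enable-wat'):
        scanners.append(run_wat.si(upload_pk))
    if scanners:
        # finalize_scanning is a made-up callback task name.
        return chord(scanners)(finalize_scanning.si(upload_pk))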