Example No. 1
def validate_file_path(path, channel):
    """Run the validator against a file at the given path, and return the
    results.

    Should only be called directly by `Validator` or the `validate_file` task.

    Search plugins don't call the linter but get linted by
    `annotate_search_plugin_validation`.

    All legacy extensions (including dictionaries, themes, etc.) are disabled
    via `annotate_legacy_addon_restrictions`, unless they're signed by
    Mozilla.
    """
    if path.endswith('.xml'):
        # search plugins are validated directly by addons-server
        # so that we don't have to call the linter or validator
        results = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
        annotations.annotate_search_plugin_validation(
            results=results, file_path=path, channel=channel)
        return json.dumps(results)

    # Annotate results with potential legacy add-ons restrictions.
    data = parse_addon(path, minimal=True)
    is_webextension = data.get('is_webextension') is True
    is_mozilla_signed = data.get('is_mozilla_signed_extension', False)

    if not is_webextension:
        results = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
        annotations.annotate_legacy_addon_restrictions(
            path=path, results=results, parsed_data=data,
            error=not is_mozilla_signed)
        return json.dumps(results)

    return run_addons_linter(path, channel=channel)
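
Every branch above returns a JSON string rather than a dict, so callers are expected to decode it. A minimal usage sketch, assuming a configured addons-server environment (the path is hypothetical):

import json

# Hypothetical path: anything ending in '.xml' takes the search-plugin
# branch and never reaches the linter.
raw = validate_file_path('/tmp/example-search-plugin.xml',
                         channel=amo.RELEASE_CHANNEL_LISTED)
results = json.loads(raw)
print(results['errors'], len(results['messages']))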
Example No. 2
def test_opensearch_validation(fixture, success, message):
    """Tests that the OpenSearch validation doesn't find anything worrying."""
    fixture_path = os.path.join(
        settings.ROOT, 'src/olympia/files/fixtures/files/opensearch/',
        fixture)

    results = {
        'messages': [],
        'errors': 0,
        'metadata': {}
    }

    annotations.annotate_search_plugin_validation(
        results, fixture_path, channel=amo.RELEASE_CHANNEL_LISTED)

    if success:
        assert not results['errors']
        assert not results['messages']
    else:
        assert results['errors']
        assert results['messages']

        expected = 'OpenSearch: {}'.format(message)
        assert any(
            msg['message'] == expected for msg in results['messages'])
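
The (fixture, success, message) signature suggests this test is driven by pytest parametrization, with the decorator omitted here. A hypothetical sketch of what it might look like (fixture names and messages are made up):

import pytest

@pytest.mark.parametrize('fixture,success,message', [
    # Illustrative cases only; not the real fixture list.
    ('simple.xml', True, None),
    ('broken.xml', False, 'Invalid XML.'),
])
def test_opensearch_validation(fixture, success, message):
    ...  # body as shown above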
Example No. 3
def handle_file_validation_result(results, file_id, *args):
    """Annotate a set of validation results and save them to the given File
    instance."""

    file_ = File.objects.get(pk=file_id)

    annotations.annotate_webext_incompatibilities(
        results=results, file_=file_, addon=file_.version.addon,
        version_string=file_.version.version, channel=file_.version.channel)

    annotations.annotate_search_plugin_validation(
        results=results, file_path=file_.current_file_path,
        channel=file_.version.channel)

    return FileValidation.from_json(file_, results).pk
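
A direct-call sketch, assuming a Django shell where the referenced File row already exists (the pk is hypothetical):

results = {'messages': [], 'errors': 0, 'metadata': {}}
# Returns the pk of the FileValidation row built from `results`.
validation_pk = handle_file_validation_result(results, file_id=1234)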
Example No. 4
def validate_file_path(path, channel):
    """Run the validator against a file at the given path, and return the
    results, which should be a JSON string.

    Should only be called directly by `validate_upload` or `validate_file`
    tasks.

    Search plugins don't call the linter but get linted by
    `annotate_search_plugin_validation`.

    All legacy extensions (including dictionaries, themes, etc.) are disabled
    via `annotate_legacy_addon_restrictions`, unless they're signed by
    Mozilla.
    """
    if path.endswith('.xml'):
        # search plugins are validated directly by addons-server
        # so that we don't have to call the linter or validator
        results = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
        annotations.annotate_search_plugin_validation(results=results,
                                                      file_path=path,
                                                      channel=channel)
        return json.dumps(results)

    # Annotate results with potential legacy add-ons restrictions.
    try:
        data = parse_addon(path, minimal=True)
    except NoManifestFound:
        # If no manifest is found, return empty data; the check below
        # explicitly looks for is_webextension is False, so it will not be
        # considered a legacy extension, and the linter will pick it up and
        # will know what message to return to the developer.
        data = {}
    except InvalidManifest:
        # Similarly, if we can't parse the manifest, let the linter pick that
        # up.
        data = {}
    is_legacy_extension = data.get('is_webextension', None) is False
    is_mozilla_signed = data.get('is_mozilla_signed_extension', None) is True

    if is_legacy_extension:
        results = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
        annotations.annotate_legacy_addon_restrictions(
            path=path,
            results=results,
            parsed_data=data,
            error=not is_mozilla_signed)
        return json.dumps(results)
    return run_addons_linter(path, channel=channel)
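
This variant differs from Example No. 1 by tolerating a missing or unparsable manifest: both exceptions collapse to an empty dict, and the explicit `is False` comparison keeps such uploads out of the legacy branch. A quick illustration of that check:

# With no manifest data, 'is_webextension' is None rather than False,
# so the upload is not flagged as legacy and falls through to the linter.
data = {}
is_legacy_extension = data.get('is_webextension', None) is False
assert not is_legacy_extension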
Example No. 5
def test_opensearch_validation_rel_self_url():
    """Tests that rel=self urls are ignored for unlisted addons."""
    fixture_path = os.path.join(settings.ROOT,
                                'src/olympia/files/fixtures/files',
                                'opensearch/rel_self_url.xml')

    results = {'messages': [], 'errors': 0, 'metadata': {}}

    annotations.annotate_search_plugin_validation(
        results, fixture_path, channel=amo.RELEASE_CHANNEL_UNLISTED)

    assert not results['errors']

    annotations.annotate_search_plugin_validation(
        results, fixture_path, channel=amo.RELEASE_CHANNEL_LISTED)

    assert results['errors']
Ejemplo n.º 6
0
def validate_file_path(path, channel):
    """Run the validator against a file at the given path, and return the
    results, which should be a json string.

    Should only be called directly by `validate_upload` or `validate_file`
    tasks.

    Search plugins don't call the linter but get linted by
    `annotate_search_plugin_validation`.

    All legacy extensions (including dictionaries, themes etc) are disabled
    via `annotate_legacy_addon_restrictions` except if they're signed by
    Mozilla.
    """
    if path.endswith('.xml'):
        # search plugins are validated directly by addons-server
        # so that we don't have to call the linter or validator
        results = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
        annotations.annotate_search_plugin_validation(
            results=results, file_path=path, channel=channel)
        return json.dumps(results)

    # Annotate results with potential legacy add-ons restrictions.
    try:
        data = parse_addon(path, minimal=True)
    except NoManifestFound:
        # If no manifest is found, return empty data; the check below
        # explicitly looks for is_webextension is False, so it will not be
        # considered a legacy extension, and the linter will pick it up and
        # will know what message to return to the developer.
        data = {}
    except InvalidManifest:
        # Similarly, if we can't parse the manifest, let the linter pick that
        # up.
        data = {}
    is_legacy_extension = data.get('is_webextension', None) is False
    is_mozilla_signed = data.get('is_mozilla_signed_extension', None) is True

    if is_legacy_extension:
        results = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
        annotations.annotate_legacy_addon_restrictions(
            path=path, results=results, parsed_data=data,
            error=not is_mozilla_signed)
        return json.dumps(results)
    return run_addons_linter(path, channel=channel)
Example No. 6
def handle_upload_validation_result(
        results, upload_pk, channel, is_mozilla_signed):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)
    # Restrictions on new legacy submissions apply if:
    # - It's the very first upload (there is no addon id yet)
    # - It's the first upload in that channel
    is_new_upload = (
        not upload.addon_id or
        not upload.addon.find_latest_version(channel=channel, exclude=()))

    # Annotate results with potential legacy add-ons restrictions.
    if not is_mozilla_signed:
        results = annotations.annotate_legacy_addon_restrictions(
            results=results, is_new_upload=is_new_upload)

    annotations.annotate_legacy_langpack_restriction(results=results)

    annotations.annotate_search_plugin_validation(
        results=results, file_path=upload.path, channel=channel)

    # Check for API keys in submissions.
    # Make sure it is extension-like, e.g. not a LWT or search plugin.
    try:
        results = check_for_api_keys_in_file(results=results, upload=upload)
    except (ValidationError, BadZipfile, IOError):
        pass

    # Annotate results with potential webext warnings on new versions.
    if upload.addon_id and upload.version:
        results = annotations.annotate_webext_incompatibilities(
            results=results, file_=None, addon=upload.addon,
            version_string=upload.version, channel=channel)

    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file may get moved, but this needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'
             .format(delta=delta, upload=upload.pk,
                     created=upload.created, now=now,
                     scaled=scaled_delta, size_in_mb=size_in_mb))
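
To make the normalization concrete, here is the same Decimal arithmetic with made-up numbers: packages over 1 MB report milliseconds per MB, while smaller ones report their raw delta.

from decimal import Decimal

delta = 4800                     # ms from upload creation to save (made up)
megabyte = Decimal(1024 * 1024)
size = 3 * megabyte              # hypothetical 3 MB package
size_in_mb = size / megabyte     # Decimal('3')
unit = size_in_mb if size > megabyte else Decimal(1)
print(Decimal(delta) / unit)     # 1600, i.e. ms per MB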