Example #1
    def from_upload(cls, upload, addon, platforms, send_signal=True,
                    source=None, is_beta=False):
        from olympia.addons.models import AddonFeatureCompatibility

        data = utils.parse_addon(upload, addon)
        try:
            license = addon.versions.latest().license_id
        except Version.DoesNotExist:
            license = None
        v = cls.objects.create(
            addon=addon,
            version=data['version'],
            license_id=license,
            source=source
        )
        log.info('New version: %r (%s) from %r' % (v, v.id, upload))

        # Update the add-on e10s compatibility since we're creating a new
        # version that may change that.
        e10s_compatibility = data.get('e10s_compatibility')
        if e10s_compatibility is not None:
            feature_compatibility = (
                AddonFeatureCompatibility.objects.get_or_create(addon=addon)[0]
            )
            feature_compatibility.update(e10s=e10s_compatibility)

        AV = ApplicationsVersions
        for app in data.get('apps', []):
            AV(version=v, min=app.min, max=app.max,
               application=app.id).save()
        if addon.type == amo.ADDON_SEARCH:
            # Search extensions are always for all platforms.
            platforms = [amo.PLATFORM_ALL.id]
        else:
            platforms = cls._make_safe_platform_files(platforms)

        for platform in platforms:
            File.from_upload(upload, v, platform, parse_data=data,
                             is_beta=is_beta)

        v.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=v)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'
                 .format(delta=upload_time, version=v,
                         created=upload.created, now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return v
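
The timing block at the end of this method recurs verbatim in every from_upload() variant below. As a minimal sketch, the pattern could be pulled out into a helper like the one below (the helper name and import paths are assumptions; olympia inlines this logic rather than using a helper):

import datetime

from django_statsd.clients import statsd

from olympia.amo.utils import utc_millesecs_from_epoch  # path assumed


def track_elapsed_since_upload(upload, metric_name):
    # Milliseconds between the FileUpload row's creation and now.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now_ts = utc_millesecs_from_epoch(datetime.datetime.now())
    delta = now_ts - upload_start
    # Report the elapsed time under the given statsd metric name.
    statsd.timing(metric_name, delta)
    return delta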
Example #2
def handle_upload_validation_result(results, upload_pk, channel):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)

    if not upload.addon_id:
        results = annotate_new_legacy_addon_restrictions(results=results)
    elif upload.addon_id and upload.version:
        results = annotate_webext_incompatibilities(
            results=results, file_=None, addon=upload.addon,
            version_string=upload.version, channel=channel)

    results = skip_signing_warning_if_signing_server_not_configured(results)
    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file may get moved, but it needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'
             .format(delta=delta, upload=upload.pk,
                     created=upload.created, now=now,
                     scaled=scaled_delta, size_in_mb=size_in_mb))
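
To make the per-megabyte scaling above concrete, here is a small worked example with invented numbers:

from decimal import Decimal

# A hypothetical 5 MB package that took 10 seconds to process:
size = Decimal(5 * 1024 * 1024)   # bytes
megabyte = Decimal(1024 * 1024)
delta = 10000                     # milliseconds

size_in_mb = size / megabyte      # Decimal('5')
unit = size_in_mb if size > megabyte else Decimal(1)
scaled_delta = Decimal(delta) / unit   # Decimal('2000') ms per MB

# A 512 KB package with the same delta keeps unit == 1, so validator
# setup overhead on tiny files does not distort the per-MB metric.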
Example #3
    def from_upload(cls, upload, addon, platforms, send_signal=True,
                    source=None, is_beta=False):
        data = utils.parse_addon(upload, addon)
        try:
            license = addon.versions.latest().license_id
        except Version.DoesNotExist:
            license = None
        max_len = cls._meta.get_field_by_name('_developer_name')[0].max_length
        developer = data.get('developer_name', '')[:max_len]
        v = cls.objects.create(
            addon=addon,
            version=data['version'],
            license_id=license,
            _developer_name=developer,
            source=source
        )
        log.info('New version: %r (%s) from %r' % (v, v.id, upload))

        AV = ApplicationsVersions
        for app in data.get('apps', []):
            AV(version=v, min=app.min, max=app.max,
               application=app.id).save()
        if addon.type == amo.ADDON_SEARCH:
            # Search extensions are always for all platforms.
            platforms = [amo.PLATFORM_ALL.id]
        else:
            platforms = cls._make_safe_platform_files(platforms)

        for platform in platforms:
            File.from_upload(upload, v, platform, parse_data=data,
                             is_beta=is_beta)

        v.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=v)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'
                 .format(delta=upload_time, version=v,
                         created=upload.created, now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return v
Example #4
def test_utc_millesecs_from_epoch():

    with freezegun.freeze_time('2018-11-18 06:05:04.030201'):
        timestamp = utc_millesecs_from_epoch()
    assert timestamp == 1542521104030

    future_now = datetime.datetime(2018, 11, 20, 4, 8, 15, 162342)
    timestamp = utc_millesecs_from_epoch(future_now)
    assert timestamp == 1542686895162

    new_timestamp = utc_millesecs_from_epoch(
        future_now + datetime.timedelta(milliseconds=42))
    assert new_timestamp == timestamp + 42
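
The expected values in this test pin down the helper's behavior precisely; a minimal implementation that satisfies all three assertions might look like the sketch below (not necessarily olympia's exact code):

import calendar
import datetime


def utc_millesecs_from_epoch(for_datetime=None):
    date = for_datetime or datetime.datetime.now()
    # calendar.timegm() interprets the struct_time as UTC, unlike
    # time.mktime(), which would apply the local timezone.
    seconds = calendar.timegm(date.utctimetuple())
    # utctimetuple() truncates to whole seconds, so restore the
    # millisecond part from the microsecond field.
    return int(seconds * 1000) + date.microsecond // 1000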
Example #5
def handle_upload_validation_result(results, upload_pk, annotate=True):
    """Annotates a set of validation results, unless `annotate` is false, and
    saves them to the given FileUpload instance."""
    if annotate:
        results = annotate_validation_results(results)

    upload = FileUpload.objects.get(pk=upload_pk)
    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing("devhub.validation_results_processed", delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file may get moved, but it needs more investigation.
        log.warning("Scaled upload stats were not tracked. "
                    "File is missing: {}".format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = "over" if size > megabyte else "under"
    statsd.timing("devhub.validation_results_processed_{}_1mb".format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing("devhub.validation_results_processed_per_mb", scaled_delta)

    log.info(
        "Time to process and save upload validation; "
        "upload.pk={upload}; processing_time={delta}; "
        "scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; "
        "created={created}; now={now}".format(
            delta=delta, upload=upload.pk, created=upload.created, now=now, scaled=scaled_delta, size_in_mb=size_in_mb
        )
    )
Example #6
    def test_track_run_time(self):
        minute_ago = datetime.datetime.now() - timedelta(minutes=1)
        task_start = utc_millesecs_from_epoch(minute_ago)
        self.cache.get.return_value = task_start

        fake_task.delay()

        approx_run_time = utc_millesecs_from_epoch() - task_start
        assert (self.statsd.timing.call_args[0][0] ==
                'tasks.olympia.amo.tests.test_cron.fake_task')
        actual_run_time = self.statsd.timing.call_args[0][1]

        fuzz = 2000  # 2 seconds
        assert (actual_run_time >= (approx_run_time - fuzz) and
                actual_run_time <= (approx_run_time + fuzz))

        assert self.cache.get.call_args[0][0].startswith('task_start_time')
        assert self.cache.delete.call_args[0][0].startswith('task_start_time')
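
This test exercises a cache-backed timing hook around Celery tasks. Below is a sketch of how such a hook could be wired with Celery signals, matching the 'tasks.' metric prefix and 'task_start_time' cache key the assertions expect (the signal wiring and import paths here are assumptions, not olympia's verbatim code):

from celery import signals
from django.core.cache import cache
from django_statsd.clients import statsd

from olympia.amo.utils import utc_millesecs_from_epoch  # path assumed


def _start_key(task_id):
    return 'task_start_time.{}'.format(task_id)


@signals.task_prerun.connect
def start_task_timer(task_id=None, **kwargs):
    # Remember when the task actually started executing.
    cache.set(_start_key(task_id), utc_millesecs_from_epoch(), 60 * 60)


@signals.task_postrun.connect
def track_task_run_time(task_id=None, task=None, **kwargs):
    start = cache.get(_start_key(task_id))
    if start is not None:
        # Report run time in milliseconds under tasks.<full task name>.
        run_time = utc_millesecs_from_epoch() - start
        statsd.timing('tasks.{}'.format(task.name), run_time)
        cache.delete(_start_key(task_id))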
Example #7
    def test_track_upload_time(self):
        # Set created time back (just for sanity); otherwise the delta
        # would be in the microsecond range.
        self.upload.update(created=datetime.now() - timedelta(days=1))

        mock_timing_path = 'olympia.versions.models.statsd.timing'
        with mock.patch(mock_timing_path) as mock_timing:
            Version.from_upload(self.upload, self.addon, [self.platform],
                                amo.RELEASE_CHANNEL_LISTED)

            upload_start = utc_millesecs_from_epoch(self.upload.created)
            now = utc_millesecs_from_epoch()
            rough_delta = now - upload_start
            actual_delta = mock_timing.call_args[0][1]

            fuzz = 2000  # 2 seconds
            assert (actual_delta >= (rough_delta - fuzz) and
                    actual_delta <= (rough_delta + fuzz))
Example #8
    def test_track_upload_time(self):
        # Set created time back (just for sanity); otherwise the delta
        # would be in the microsecond range.
        self.upload.update(created=datetime.now() - timedelta(days=1))

        mock_timing_path = 'olympia.versions.models.statsd.timing'
        with mock.patch(mock_timing_path) as mock_timing:
            Version.from_upload(
                self.upload, self.addon, [self.selected_app],
                amo.RELEASE_CHANNEL_LISTED,
                parsed_data=self.dummy_parsed_data)

            upload_start = utc_millesecs_from_epoch(self.upload.created)
            now = utc_millesecs_from_epoch()
            rough_delta = now - upload_start
            actual_delta = mock_timing.call_args[0][1]

            fuzz = 2000  # 2 seconds
            assert (actual_delta >= (rough_delta - fuzz) and
                    actual_delta <= (rough_delta + fuzz))
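
As an aside, the two-sided fuzz assertion used in this and the previous test can be written more compactly with pytest.approx, assuming pytest is available in the suite:

import pytest

# Passes when actual_delta is within 2000 ms of rough_delta, i.e. the
# same condition as the pair of inequalities above.
assert actual_delta == pytest.approx(rough_delta, abs=2000)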
Example #9
    def test_track_run_time(self, celery_statsd, celery_cache):
        minute_ago = datetime.datetime.now() - timedelta(minutes=1)
        task_start = utc_millesecs_from_epoch(minute_ago)
        celery_cache.get.return_value = task_start

        result = fake_task_with_result.delay()
        result.get()

        approx_run_time = utc_millesecs_from_epoch() - task_start
        assert (celery_statsd.timing.call_args[0][0] ==
                'tasks.olympia.amo.tests.test_celery.fake_task_with_result')
        actual_run_time = celery_statsd.timing.call_args[0][1]

        fuzz = 2000  # 2 seconds
        assert (actual_run_time >= (approx_run_time - fuzz)
                and actual_run_time <= (approx_run_time + fuzz))

        assert (
            celery_cache.get.call_args[0][0] == f'task_start_time.{result.id}')
        assert (celery_cache.delete.call_args[0][0] ==
                f'task_start_time.{result.id}')
Example #10
    def from_upload(cls, upload, addon, selected_apps, channel,
                    parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of compatible app ids, a channel id and
        the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.
        """
        from olympia.git.utils import create_git_extraction_entry

        assert parsed_data is not None

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(
                channel=channel, exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approval_notes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approval_notes = (u'This version has been signed with '
                              u'Mozilla internal certificate.')
        version = cls.objects.create(
            addon=addon,
            approval_notes=approval_notes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
        )
        email = upload.user.email if upload.user and upload.user.email else ''
        with core.override_remote_addr(upload.ip_address):
            log.info(
                'New version: %r (%s) from %r' % (version, version.id, upload),
                extra={
                    'email': email,
                    'guid': addon.guid,
                    'upload': upload.uuid.hex,
                    'user_id': upload.user_id,
                    'from_api': upload.source == amo.UPLOAD_SOURCE_API,
                }
            )
            activity.log_create(
                amo.LOG.ADD_VERSION, version, addon,
                user=upload.user or get_task_user())

        if addon.type == amo.ADDON_STATICTHEME:
            # We don't let developers select apps for static themes
            selected_apps = [app.id for app in amo.APP_USAGE]

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            if app.id not in selected_apps:
                # If the user explicitly deselected Firefox for Android,
                # we don't create the corresponding `ApplicationsVersions`,
                # so the add-on ends up listed for Firefox only.
                continue

            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # See #2828: sometimes when we generate the filename(s) below, in
        # File.from_upload(), cache-machine is confused and has trouble
        # fetching the ApplicationsVersions that were just created. To work
        # around this we pre-generate version.compatible_apps and avoid the
        # queries completely.
        version._compatible_apps = compatible_apps

        # For backwards compatibility. We removed specific platform
        # support during submission, but we don't handle it any differently
        # beyond that yet. That means we simply set it to `PLATFORM_ALL`
        # and still have the backend create separate files for each
        # platform. Cleaning that up is another step. Given the timing,
        # we don't care about updates to legacy add-ons either.
        # Create relevant file and update the all_files cached property on the
        # Version, because we might need it afterwards.
        version.all_files = [File.from_upload(
            upload=upload, version=version, platform=amo.PLATFORM_ALL.id,
            parsed_data=parsed_data
        )]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()

        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        upload.path = ''
        upload.save()

        version_uploaded.send(instance=version, sender=Version)

        if version.is_webextension:
            if (
                    waffle.switch_is_active('enable-yara') or
                    waffle.switch_is_active('enable-customs') or
                    waffle.switch_is_active('enable-wat')
            ):
                ScannerResult.objects.filter(upload_id=upload.id).update(
                    version=version)

        if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
            # Schedule this version for git extraction.
            transaction.on_commit(
                lambda: create_git_extraction_entry(version=version)
            )

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME and
                channel == amo.RELEASE_CHANNEL_LISTED):
            theme_data = parsed_data.get('theme', {})
            generate_static_theme_preview(theme_data, version.pk)

        # Authors need to be notified about auto-approval delay again since
        # they are submitting a new version.
        addon.reset_notified_about_auto_approval_delay()

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'
                 .format(delta=upload_time, version=version,
                         created=upload.created, now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
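
The transaction.on_commit() call above is the standard Django idiom for side effects that must not fire if the surrounding transaction rolls back. A minimal sketch of the behavior (make_version is a hypothetical stand-in for the creation flow above):

from django.db import transaction

with transaction.atomic():
    version = make_version()  # hypothetical stand-in for the code above
    # The callback runs only after COMMIT; if this block raises and the
    # transaction rolls back, git extraction is never scheduled.
    transaction.on_commit(
        lambda: create_git_extraction_entry(version=version))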
Example #11
def handle_upload_validation_result(all_results, upload_pk, channel,
                                    is_mozilla_signed):
    """Annotate a set of validation results and save them to the given
    FileUpload instance.

    This task is the callback of the Celery chord in the validation chain. It
    receives all the results returned by all the tasks in this chord (in
    `all_results`).
    """
    # This task is the callback of a Celery chord and receives all the results
    # returned by all the tasks in this chord. The first task registered in the
    # chord is `forward_linter_results()`:
    results = all_results[0]

    upload = FileUpload.objects.get(pk=upload_pk)

    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file may get moved, but it needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'.format(delta=delta,
                                                   upload=upload.pk,
                                                   created=upload.created,
                                                   now=now,
                                                   scaled=scaled_delta,
                                                   size_in_mb=size_in_mb))
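
This example's docstring states that the function is the callback of a Celery chord whose first header task is forward_linter_results(). A purely illustrative sketch of that wiring (task signatures and the variables upload, channel, and is_mozilla_signed are assumed from the submission flow, not copied from olympia):

from celery import chord

# The chord callback receives a list with one result per header task,
# in order, prepended to its own arguments; all_results[0] is therefore
# the linter results forwarded by the first task.
callback = handle_upload_validation_result.s(
    upload_pk=upload.pk, channel=channel,
    is_mozilla_signed=is_mozilla_signed)
chord([forward_linter_results.s(upload.pk), run_yara.s(upload.pk)])(callback)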
Example #12
    def __init__(self):
        self.current_datetime = datetime.datetime.now()
        self.current_epoch_ms = utc_millesecs_from_epoch(self.current_datetime)
Example #13
def handle_upload_validation_result(results, upload_pk, channel):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)

    if not upload.addon_id:
        results = annotate_new_legacy_addon_restrictions(results=results)
    elif upload.addon_id and upload.version:
        results = annotate_webext_incompatibilities(
            results=results,
            file_=None,
            addon=upload.addon,
            version_string=upload.version,
            channel=channel)

    results = skip_signing_warning_if_signing_server_not_configured(results)
    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file may get moved, but it needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'.format(delta=delta,
                                                   upload=upload.pk,
                                                   created=upload.created,
                                                   now=now,
                                                   scaled=scaled_delta,
                                                   size_in_mb=size_in_mb))
Example #14
    def from_upload(cls, upload, addon, selected_apps, channel,
                    parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of compatible app ids, a channel id and
        the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.
        """
        assert parsed_data is not None

        from olympia.addons.models import AddonFeatureCompatibility

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(
                channel=channel, exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approvalnotes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approvalnotes = (u'This version has been signed with '
                             u'Mozilla internal certificate.')
        version = cls.objects.create(
            addon=addon,
            approvalnotes=approvalnotes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
        )
        log.info(
            'New version: %r (%s) from %r' % (version, version.id, upload))
        activity.log_create(amo.LOG.ADD_VERSION, version, addon)
        # Update the add-on e10s compatibility since we're creating a new
        # version that may change that.
        e10s_compatibility = parsed_data.get('e10s_compatibility')
        if e10s_compatibility is not None:
            feature_compatibility = (
                AddonFeatureCompatibility.objects.get_or_create(addon=addon)[0]
            )
            feature_compatibility.update(e10s=e10s_compatibility)

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            if app.id not in selected_apps:
                # If the user explicitly deselected Firefox for Android,
                # we don't create the corresponding `ApplicationsVersions`,
                # so the add-on ends up listed for Firefox only.
                continue

            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # See #2828: sometimes when we generate the filename(s) below, in
        # File.from_upload(), cache-machine is confused and has trouble
        # fetching the ApplicationsVersions that were just created. To work
        # around this we pre-generate version.compatible_apps and avoid the
        # queries completely.
        version._compatible_apps = compatible_apps

        # For backwards compatibility. We removed specific platform
        # support during submission, but we don't handle it any differently
        # beyond that yet. That means we simply set it to `PLATFORM_ALL`
        # and still have the backend create separate files for each
        # platform. Cleaning that up is another step. Given the timing,
        # we don't care about updates to legacy add-ons either.
        # Create relevant file and update the all_files cached property on the
        # Version, because we might need it afterwards.
        version.all_files = [File.from_upload(
            upload=upload, version=version, platform=amo.PLATFORM_ALL.id,
            parsed_data=parsed_data
        )]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        version_uploaded.send(sender=version)

        if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
            # Extract into git repository
            AddonGitRepository.extract_and_commit_from_file_obj(
                file_obj=version.all_files[0],
                channel=channel,
                author=upload.user)

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME and
                channel == amo.RELEASE_CHANNEL_LISTED):
            theme_data = parsed_data.get('theme', {})
            generate_static_theme_preview(theme_data, version.pk)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'
                 .format(delta=upload_time, version=version,
                         created=upload.created, now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
Example #15
    def from_upload(cls,
                    upload,
                    addon,
                    platforms,
                    channel,
                    source=None,
                    parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of platform ids, a channel id and the
        parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.
        """
        assert parsed_data is not None

        from olympia.addons.models import AddonFeatureCompatibility

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        version = cls.objects.create(
            addon=addon,
            version=parsed_data['version'],
            license_id=license_id,
            source=source,
            channel=channel,
        )
        log.info('New version: %r (%s) from %r' %
                 (version, version.id, upload))
        activity.log_create(amo.LOG.ADD_VERSION, version, addon)
        # Update the add-on e10s compatibility since we're creating a new
        # version that may change that.
        e10s_compatibility = parsed_data.get('e10s_compatibility')
        if e10s_compatibility is not None:
            feature_compatibility = (
                AddonFeatureCompatibility.objects.get_or_create(
                    addon=addon)[0])
            feature_compatibility.update(e10s=e10s_compatibility)

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # See #2828: sometimes when we generate the filename(s) below, in
        # File.from_upload(), cache-machine is confused and has trouble
        # fetching the ApplicationsVersions that were just created. To work
        # around this we pre-generate version.compatible_apps and avoid the
        # queries completely.
        version._compatible_apps = compatible_apps

        if addon.type in [amo.ADDON_SEARCH, amo.ADDON_STATICTHEME]:
            # Search extensions and static themes are always for all platforms.
            platforms = [amo.PLATFORM_ALL.id]
        else:
            platforms = cls._make_safe_platform_files(platforms)

        # Create as many files as we have platforms. Update the all_files
        # cached property on the Version while we're at it, because we might
        # need it afterwards.
        version.all_files = [
            File.from_upload(upload,
                             version,
                             platform,
                             parsed_data=parsed_data) for platform in platforms
        ]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        version_uploaded.send(sender=version)

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME
                and channel == amo.RELEASE_CHANNEL_LISTED):
            dst_root = os.path.join(user_media_path('addons'), str(addon.id))
            theme_data = parsed_data.get('theme', {})
            version_root = os.path.join(dst_root, unicode(version.id))

            utils.extract_header_img(version.all_files[0].file_path,
                                     theme_data, version_root)
            preview = VersionPreview.objects.create(version=version)
            generate_static_theme_preview(theme_data, version_root, preview.pk)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
Example #16
    def from_upload(cls,
                    upload,
                    addon,
                    platforms,
                    channel,
                    send_signal=True,
                    source=None,
                    is_beta=False,
                    parsed_data=None):
        from olympia.addons.models import AddonFeatureCompatibility

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        if parsed_data is None:
            parsed_data = utils.parse_addon(upload, addon)
        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        version = cls.objects.create(
            addon=addon,
            version=parsed_data['version'],
            license_id=license_id,
            source=source,
            channel=channel,
        )
        log.info('New version: %r (%s) from %r' %
                 (version, version.id, upload))
        activity.log_create(amo.LOG.ADD_VERSION, version, addon)
        # Update the add-on e10s compatibility since we're creating a new
        # version that may change that.
        e10s_compatibility = parsed_data.get('e10s_compatibility')
        if e10s_compatibility is not None:
            feature_compatibility = (
                AddonFeatureCompatibility.objects.get_or_create(
                    addon=addon)[0])
            feature_compatibility.update(e10s=e10s_compatibility)

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # See #2828: sometimes when we generate the filename(s) below, in
        # File.from_upload(), cache-machine is confused and has trouble
        # fetching the ApplicationsVersions that were just created. To work
        # around this we pre-generate version.compatible_apps and avoid the
        # queries completely.
        version.compatible_apps = compatible_apps

        if addon.type == amo.ADDON_SEARCH:
            # Search extensions are always for all platforms.
            platforms = [amo.PLATFORM_ALL.id]
        else:
            platforms = cls._make_safe_platform_files(platforms)

        for platform in platforms:
            File.from_upload(upload,
                             version,
                             platform,
                             parsed_data=parsed_data,
                             is_beta=is_beta)

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=version)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
Example #17
    def from_upload(cls, upload, addon, platforms, channel, send_signal=True,
                    source=None, is_beta=False):
        from olympia.addons.models import AddonFeatureCompatibility

        data = utils.parse_addon(upload, addon)
        try:
            license = addon.versions.latest().license_id
        except Version.DoesNotExist:
            license = None
        version = cls.objects.create(
            addon=addon,
            version=data['version'],
            license_id=license,
            source=source,
            channel=channel,
        )
        log.info(
            'New version: %r (%s) from %r' % (version, version.id, upload))

        # Update the add-on e10s compatibility since we're creating a new
        # version that may change that.
        e10s_compatibility = data.get('e10s_compatibility')
        if e10s_compatibility is not None:
            feature_compatibility = (
                AddonFeatureCompatibility.objects.get_or_create(addon=addon)[0]
            )
            feature_compatibility.update(e10s=e10s_compatibility)

        compatible_apps = {}
        for app in data.get('apps', []):
            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # See #2828: sometimes when we generate the filename(s) below, in
        # File.from_upload(), cache-machine is confused and has trouble
        # fetching the ApplicationsVersions that were just created. To work
        # around this we pre-generate version.compatible_apps and avoid the
        # queries completely.
        version.compatible_apps = compatible_apps

        if addon.type == amo.ADDON_SEARCH:
            # Search extensions are always for all platforms.
            platforms = [amo.PLATFORM_ALL.id]
        else:
            platforms = cls._make_safe_platform_files(platforms)

        for platform in platforms:
            File.from_upload(upload, version, platform, parse_data=data,
                             is_beta=is_beta)

        version.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=version)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'
                 .format(delta=upload_time, version=version,
                         created=upload.created, now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
Example #18
def handle_upload_validation_result(
        results, upload_pk, channel, is_mozilla_signed):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)

    if waffle.switch_is_active('enable-yara') and results['errors'] == 0:
        # Run Yara. This cannot be asynchronous because we have no way to know
        # whether the task will complete before we attach a `Version` to it
        # later in the submission process... Because we cannot use `chord`
        # reliably right now (requires Celery 4.2+), this task is actually not
        # run as a task; it's a simple function call.
        #
        # TODO: use `run_yara` as a task in the submission chord once it is
        # possible. See: https://github.com/mozilla/addons-server/issues/12216
        run_yara(upload.pk)

    if waffle.switch_is_active('enable-customs') and results['errors'] == 0:
        # Run customs. This cannot be asynchronous because we have no way to
        # know whether the task will complete before we attach a `Version` to
        # it later in the submission process... Because we cannot use `chord`
        # reliably right now (requires Celery 4.2+), this task is actually not
        # run as a task; it's a simple function call.
        #
        # TODO: use `run_customs` as a task in the submission chord once it is
        # possible. See: https://github.com/mozilla/addons-server/issues/12217
        run_customs(upload.pk)

    if waffle.switch_is_active('enable-wat') and results['errors'] == 0:
        # Run wat. This cannot be asynchronous because we have no way to know
        # whether the task will complete before we attach a `Version` to it
        # later in the submission process... Because we cannot use `chord`
        # reliably right now (requires Celery 4.2+), this task is actually not
        # run as a task; it's a simple function call.
        #
        # TODO: use `run_wat` as a task in the submission chord once it is
        # possible. See: https://github.com/mozilla/addons-server/issues/12224
        run_wat(upload.pk)

    # Check for API keys in submissions.
    # Make sure it is extension-like, e.g. no search plugin
    try:
        results = check_for_api_keys_in_file(results=results, upload=upload)
    except (ValidationError, BadZipfile, IOError):
        pass

    # Annotate results with potential webext warnings on new versions.
    if upload.addon_id and upload.version:
        annotations.annotate_webext_incompatibilities(
            results=results, file_=None, addon=upload.addon,
            version_string=upload.version, channel=channel)

    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file may get moved, but it needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'
             .format(delta=delta, upload=upload.pk,
                     created=upload.created, now=now,
                     scaled=scaled_delta, size_in_mb=size_in_mb))
Example #19
def handle_upload_validation_result(
        results, upload_pk, channel, is_mozilla_signed):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)

    # Check for API keys in submissions.
    # Make sure it is extension-like, e.g. no LWT or search plugin
    try:
        results = check_for_api_keys_in_file(results=results, upload=upload)
    except (ValidationError, BadZipfile, IOError):
        pass

    # Annotate results with akismet reports results if there are any.
    reports = AkismetReport.objects.filter(upload_instance=upload)
    akismet_results = [
        (report.comment_type, report.result) for report in reports]
    annotations.annotate_akismet_spam_check(results, akismet_results)

    # Annotate results with potential webext warnings on new versions.
    if upload.addon_id and upload.version:
        annotations.annotate_webext_incompatibilities(
            results=results, file_=None, addon=upload.addon,
            version_string=upload.version, channel=channel)

    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file may get moved, but it needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'
             .format(delta=delta, upload=upload.pk,
                     created=upload.created, now=now,
                     scaled=scaled_delta, size_in_mb=size_in_mb))
Example #20
    def from_upload(cls, upload, addon, platforms, channel,
                    source=None, parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of platform ids, a channel id and the
        parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.
        """
        assert parsed_data is not None

        from olympia.addons.models import AddonFeatureCompatibility

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(
                channel=channel, exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        version = cls.objects.create(
            addon=addon,
            version=parsed_data['version'],
            license_id=license_id,
            source=source,
            channel=channel,
        )
        log.info(
            'New version: %r (%s) from %r' % (version, version.id, upload))
        activity.log_create(amo.LOG.ADD_VERSION, version, addon)
        # Update the add-on e10s compatibility since we're creating a new
        # version that may change that.
        e10s_compatibility = parsed_data.get('e10s_compatibility')
        if e10s_compatibility is not None:
            feature_compatibility = (
                AddonFeatureCompatibility.objects.get_or_create(addon=addon)[0]
            )
            feature_compatibility.update(e10s=e10s_compatibility)

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # See #2828: sometimes when we generate the filename(s) below, in
        # File.from_upload(), cache-machine is confused and has trouble
        # fetching the ApplicationsVersions that were just created. To work
        # around this we pre-generate version.compatible_apps and avoid the
        # queries completely.
        version._compatible_apps = compatible_apps

        if addon.type in [amo.ADDON_SEARCH, amo.ADDON_STATICTHEME]:
            # Search extensions and static themes are always for all platforms.
            platforms = [amo.PLATFORM_ALL.id]
        else:
            platforms = cls._make_safe_platform_files(platforms)

        # Create as many files as we have platforms. Update the all_files
        # cached property on the Version while we're at it, because we might
        # need it afterwards.
        version.all_files = [
            File.from_upload(
                upload, version, platform, parsed_data=parsed_data)
            for platform in platforms]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        version_uploaded.send(sender=version)

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME and
                channel == amo.RELEASE_CHANNEL_LISTED):
            dst_root = os.path.join(user_media_path('addons'), str(addon.id))
            theme_data = parsed_data.get('theme', {})
            version_root = os.path.join(dst_root, unicode(version.id))

            utils.extract_header_img(
                version.all_files[0].file_path, theme_data, version_root)
            preview = VersionPreview.objects.create(version=version)
            generate_static_theme_preview(
                theme_data, version_root, preview.pk)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'
                 .format(delta=upload_time, version=version,
                         created=upload.created, now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
Example #21
    def from_upload(cls, upload, addon, platforms, channel, send_signal=True,
                    source=None, is_beta=False, parsed_data=None):
        from olympia.addons.models import AddonFeatureCompatibility

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        if parsed_data is None:
            parsed_data = utils.parse_addon(upload, addon)
        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(
                channel=channel, exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        version = cls.objects.create(
            addon=addon,
            version=parsed_data['version'],
            license_id=license_id,
            source=source,
            channel=channel,
        )
        log.info(
            'New version: %r (%s) from %r' % (version, version.id, upload))
        activity.log_create(amo.LOG.ADD_VERSION, version, addon)
        # Update the add-on e10s compatibility since we're creating a new
        # version that may change that.
        e10s_compatibility = parsed_data.get('e10s_compatibility')
        if e10s_compatibility is not None:
            feature_compatibility = (
                AddonFeatureCompatibility.objects.get_or_create(addon=addon)[0]
            )
            feature_compatibility.update(e10s=e10s_compatibility)

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # See #2828: sometimes when we generate the filename(s) below, in
        # File.from_upload(), cache-machine is confused and has trouble
        # fetching the ApplicationsVersions that were just created. To work
        # around this we pre-generate version.compatible_apps and avoid the
        # queries completely.
        version._compatible_apps = compatible_apps

        if addon.type in [amo.ADDON_SEARCH, amo.ADDON_STATICTHEME]:
            # Search extensions and static themes are always for all platforms.
            platforms = [amo.PLATFORM_ALL.id]
        else:
            platforms = cls._make_safe_platform_files(platforms)

        for platform in platforms:
            File.from_upload(upload, version, platform,
                             parsed_data=parsed_data, is_beta=is_beta)

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=version)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'
                 .format(delta=upload_time, version=version,
                         created=upload.created, now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
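A hypothetical call site for this revision's signature, assuming the classmethod lives on Version as the surrounding code suggests (argument values are illustrative, not taken from the source):

version = Version.from_upload(
    upload, addon, platforms=[amo.PLATFORM_ALL.id],
    channel=amo.RELEASE_CHANNEL_LISTED, source=None, is_beta=False)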
Example #22
    def from_upload(cls,
                    upload,
                    addon,
                    selected_apps,
                    channel,
                    parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of compatible app ids, a channel id and
        the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.
        """
        from olympia.addons.models import AddonReviewerFlags
        from olympia.addons.utils import RestrictionChecker
        from olympia.git.utils import create_git_extraction_entry

        assert parsed_data is not None

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        if upload.addon and upload.addon != addon:
            raise VersionCreateError(
                'FileUpload was made for a different Addon')

        if not upload.user or not upload.ip_address or not upload.source:
            raise VersionCreateError(
                'FileUpload does not have some required fields')

        if not upload.user.last_login_ip or not upload.user.email:
            raise VersionCreateError(
                'FileUpload user does not have some required fields')

        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approval_notes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approval_notes = (
                'This version has been signed with Mozilla internal certificate.'
            )
        version = cls.objects.create(
            addon=addon,
            approval_notes=approval_notes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
        )
        email = upload.user.email if upload.user and upload.user.email else ''
        with core.override_remote_addr(upload.ip_address):
            # The following log statement is used by foxsec-pipeline.
            # We override the IP because this code might be called from a
            # task, and we want the submitter's original IP.
            log.info(
                f'New version: {version!r} ({version.id}) from {upload!r}',
                extra={
                    'email': email,
                    'guid': addon.guid,
                    'upload': upload.uuid.hex,
                    'user_id': upload.user_id,
                    'from_api': upload.source == amo.UPLOAD_SOURCE_API,
                },
            )
            activity.log_create(amo.LOG.ADD_VERSION,
                                version,
                                addon,
                                user=upload.user or get_task_user())

        if addon.type == amo.ADDON_STATICTHEME:
            # We don't let developers select apps for static themes
            selected_apps = [app.id for app in amo.APP_USAGE]

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            if app.id not in selected_apps:
                # If the user explicitly deselected Firefox for Android,
                # we don't create the respective `ApplicationsVersions`,
                # so the add-on will then be listed only for Firefox
                # specifically.
                continue

            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # Pre-generate _compatible_apps property to avoid accidentally
        # triggering queries with that instance later.
        version._compatible_apps = compatible_apps

        # Create relevant file and update the all_files cached property on the
        # Version, because we might need it afterwards.
        version.all_files = [
            File.from_upload(
                upload=upload,
                version=version,
                parsed_data=parsed_data,
            )
        ]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()

        # After the upload has been copied to its permanent location, delete it
        # from storage. Keep the FileUpload instance (it eventually gets
        # cleaned up by a cron, amo.cron.gc(), some time after its creation),
        # making sure it's associated with the add-on instance.
        storage.delete(upload.path)
        upload.path = ''
        if upload.addon is None:
            upload.addon = addon
        upload.save()

        version_uploaded.send(instance=version, sender=Version)

        if version.is_webextension:
            if (waffle.switch_is_active('enable-yara')
                    or waffle.switch_is_active('enable-customs')
                    or waffle.switch_is_active('enable-wat')):
                ScannerResult.objects.filter(upload_id=upload.id).update(
                    version=version)

        if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
            # Schedule this version for git extraction.
            transaction.on_commit(
                lambda: create_git_extraction_entry(version=version))

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME
                and channel == amo.RELEASE_CHANNEL_LISTED):
            theme_data = parsed_data.get('theme', {})
            generate_static_theme_preview(theme_data, version.pk)

        # Reset add-on reviewer flags to disable auto-approval and require
        # admin code review if the package has already been signed by mozilla.
        reviewer_flags_defaults = {}
        is_mozilla_signed = parsed_data.get('is_mozilla_signed_extension')
        if upload.validation_timeout:
            reviewer_flags_defaults['needs_admin_code_review'] = True
        if is_mozilla_signed and addon.type != amo.ADDON_LPAPP:
            reviewer_flags_defaults['needs_admin_code_review'] = True
            reviewer_flags_defaults['auto_approval_disabled'] = True

        # Check if the approval should be restricted
        if not RestrictionChecker(upload=upload).is_auto_approval_allowed():
            flag = ('auto_approval_disabled'
                    if channel == amo.RELEASE_CHANNEL_LISTED else
                    'auto_approval_disabled_unlisted')
            reviewer_flags_defaults[flag] = True

        if reviewer_flags_defaults:
            AddonReviewerFlags.objects.update_or_create(
                addon=addon, defaults=reviewer_flags_defaults)

        # Authors need to be notified about auto-approval delay again since
        # they are submitting a new version.
        addon.reset_notified_about_auto_approval_delay()

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
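Because this revision asserts that parsed_data is provided, callers are expected to run parse_addon() first. A minimal sketch of that sequence, assuming parse_addon lives in olympia.files.utils and accepts these keyword arguments:

from olympia.files import utils

parsed_data = utils.parse_addon(upload, addon=addon, user=upload.user)
version = Version.from_upload(
    upload, addon, selected_apps=[app.id for app in amo.APP_USAGE],
    channel=amo.RELEASE_CHANNEL_LISTED, parsed_data=parsed_data)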
Example #23
    def __init__(self):
        self.current_datetime = datetime.datetime.now()
        self.current_epoch_ms = utc_millesecs_from_epoch(
            self.current_datetime)
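This snippet presumes utc_millesecs_from_epoch is already in scope; Example #25 below shows the same initializer with the explicit import from olympia.amo.utils.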
Example #24
    def approximate_upload_time(self):
        upload_start = utc_millesecs_from_epoch(self.upload.created)
        now = utc_millesecs_from_epoch()
        return now - upload_start
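Example #24 calls utc_millesecs_from_epoch() with no argument, implying the helper defaults to the current time. A minimal sketch of what it presumably computes, treating naive datetimes as UTC; the real implementation lives in olympia.amo.utils and may differ in such details:

import calendar
import datetime


def utc_millesecs_from_epoch(for_datetime=None):
    """Milliseconds since the Unix epoch for the given (or current) time."""
    date = for_datetime or datetime.datetime.now()
    # timegm() interprets the tuple as UTC and keeps only whole seconds,
    # so microseconds are added back separately.
    seconds = calendar.timegm(date.utctimetuple())
    return seconds * 1000 + date.microsecond // 1000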
Example #25
    def __init__(self):
        from olympia.amo.utils import utc_millesecs_from_epoch
        self.current_datetime = datetime.datetime.now()
        self.current_epoch_ms = utc_millesecs_from_epoch(
            self.current_datetime)
Example #26
def handle_upload_validation_result(results, upload_pk, channel,
                                    is_mozilla_signed):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)
    # Restrictions on new legacy submissions apply if:
    # - It's the very first upload (there is no addon id yet)
    # - It's the first upload in that channel
    is_new_upload = (
        not upload.addon_id
        or not upload.addon.find_latest_version(channel=channel, exclude=()))

    # Annotate results with potential legacy add-ons restrictions.
    if not is_mozilla_signed:
        results = annotate_legacy_addon_restrictions(
            results=results, is_new_upload=is_new_upload)

    annotate_legacy_langpack_restriction(results=results)

    # Check for API keys in submissions.
    # Make sure it's extension-like, e.g. not a LWT or search plugin.
    try:
        results = check_for_api_keys_in_file(results=results, upload=upload)
    except (ValidationError, BadZipfile, IOError):
        pass

    # Annotate results with potential webext warnings on new versions.
    if upload.addon_id and upload.version:
        results = annotate_webext_incompatibilities(
            results=results,
            file_=None,
            addon=upload.addon,
            version_string=upload.version,
            channel=channel)

    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file may get moved, but it needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'.format(delta=delta,
                                                   upload=upload.pk,
                                                   created=upload.created,
                                                   now=now,
                                                   scaled=scaled_delta,
                                                   size_in_mb=size_in_mb))
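To make the per-MB scaling above concrete: a hypothetical 5 MB package that took 10,000 ms to process reports 2,000 ms per MB, while anything at or under 1 MB reports its raw delta (the values below are illustrative):

from decimal import Decimal

megabyte = Decimal(1024 * 1024)
size = Decimal(5 * 1024 * 1024)  # hypothetical 5 MB package
delta = 10000  # hypothetical processing time in milliseconds

size_in_mb = size / megabyte
unit = size_in_mb if size > megabyte else Decimal(1)
print(Decimal(delta) / unit)  # -> 2000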
Example #27
    def from_upload(cls,
                    upload,
                    addon,
                    selected_apps,
                    channel,
                    parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of compatible app ids, a channel id and
        the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.
        """
        assert parsed_data is not None

        from olympia.addons.models import AddonFeatureCompatibility

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approvalnotes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approvalnotes = (u'This version has been signed with '
                             u'Mozilla internal certificate.')
        version = cls.objects.create(
            addon=addon,
            approvalnotes=approvalnotes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
        )
        log.info('New version: %r (%s) from %r' %
                 (version, version.id, upload))
        activity.log_create(amo.LOG.ADD_VERSION, version, addon)
        # Update the add-on e10s compatibility since we're creating a new
        # version that may change that.
        e10s_compatibility = parsed_data.get('e10s_compatibility')
        if e10s_compatibility is not None:
            feature_compatibility = (
                AddonFeatureCompatibility.objects.get_or_create(
                    addon=addon)[0])
            feature_compatibility.update(e10s=e10s_compatibility)

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            if app.id not in selected_apps:
                # If the user explicitly deselected Firefox for Android,
                # we don't create the respective `ApplicationsVersions`,
                # so the add-on will then be listed only for Firefox
                # specifically.
                continue

            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # See #2828: sometimes when we generate the filename(s) below, in
        # File.from_upload(), cache-machine is confused and has trouble
        # fetching the ApplicationsVersions that were just created. To work
        # around this we pre-generate version.compatible_apps and avoid the
        # queries completely.
        version._compatible_apps = compatible_apps

        # For backwards compatibility. We removed specific platform
        # support during submission, but we don't handle it any differently
        # beyond that yet. That means we simply set it to `PLATFORM_ALL`
        # and still have the backend create separate files for each
        # platform. Cleaning that up is another step. Given the timing on
        # this, we don't care about updates to legacy add-ons either.
        # Create relevant file and update the all_files cached property on the
        # Version, because we might need it afterwards.
        version.all_files = [
            File.from_upload(upload=upload,
                             version=version,
                             platform=amo.PLATFORM_ALL.id,
                             parsed_data=parsed_data)
        ]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        version_uploaded.send(sender=version)

        if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
            # Extract into git repository
            AddonGitRepository.extract_and_commit_from_file_obj(
                file_obj=version.all_files[0],
                channel=channel,
                author=upload.user)

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME
                and channel == amo.RELEASE_CHANNEL_LISTED):
            dst_root = os.path.join(user_media_path('addons'), str(addon.id))
            theme_data = parsed_data.get('theme', {})
            version_root = os.path.join(dst_root, unicode(version.id))

            utils.extract_header_img(version.all_files[0].file_path,
                                     theme_data, version_root)
            generate_static_theme_preview(theme_data, version_root, version.pk)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
Example #28
    def __init__(self):
        from olympia.amo.utils import utc_millesecs_from_epoch
        self.current_datetime = datetime.datetime.now()
        self.current_epoch_ms = utc_millesecs_from_epoch(
            self.current_datetime)
Example #29
    def from_upload(cls,
                    upload,
                    addon,
                    platforms,
                    send_signal=True,
                    source=None,
                    is_beta=False):
        from olympia.addons.models import AddonFeatureCompatibility

        data = utils.parse_addon(upload, addon)
        try:
            license = addon.versions.latest().license_id
        except Version.DoesNotExist:
            license = None
        v = cls.objects.create(addon=addon,
                               version=data['version'],
                               license_id=license,
                               source=source)
        log.info('New version: %r (%s) from %r' % (v, v.id, upload))

        # Update the add-on e10s compatibility since we're creating a new
        # version that may change that.
        e10s_compatibility = data.get('e10s_compatibility')
        if e10s_compatibility is not None:
            feature_compatibility = (
                AddonFeatureCompatibility.objects.get_or_create(
                    addon=addon)[0])
            feature_compatibility.update(e10s=e10s_compatibility)

        AV = ApplicationsVersions
        for app in data.get('apps', []):
            AV(version=v, min=app.min, max=app.max, application=app.id).save()
        if addon.type == amo.ADDON_SEARCH:
            # Search extensions are always for all platforms.
            platforms = [amo.PLATFORM_ALL.id]
        else:
            platforms = cls._make_safe_platform_files(platforms)

        for platform in platforms:
            File.from_upload(upload,
                             v,
                             platform,
                             parse_data=data,
                             is_beta=is_beta)

        v.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=v)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=v,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return v