def from_upload(cls, upload, addon, platforms, send_signal=True,
                source=None, is_beta=False):
    """Create a new version of ``addon`` from a FileUpload.

    Parses the upload, creates the Version row and one File per
    platform, deletes the upload from storage once it has been copied,
    optionally sends the ``version_uploaded`` signal, and records how
    long the upload-to-version pipeline took.

    NOTE(review): assumes ``upload`` has already passed validation —
    confirm against callers.
    """
    data = utils.parse_addon(upload, addon)
    # Carry the license forward from the latest existing version, if any.
    try:
        license = addon.versions.latest().license_id
    except Version.DoesNotExist:
        license = None
    # Truncate the developer name so it fits the model field.
    max_len = cls._meta.get_field_by_name('_developer_name')[0].max_length
    developer = data.get('developer_name', '')[:max_len]
    v = cls.objects.create(addon=addon, version=data['version'],
                           license_id=license, _developer_name=developer,
                           source=source)
    log.info('New version: %r (%s) from %r' % (v, v.id, upload))
    AV = ApplicationsVersions
    # One ApplicationsVersions row per supported application range.
    for app in data.get('apps', []):
        AV(version=v, min=app.min, max=app.max, application=app.id).save()
    if addon.type == amo.ADDON_SEARCH:
        # Search extensions are always for all platforms.
        platforms = [amo.PLATFORM_ALL.id]
    else:
        platforms = cls._make_safe_platform_files(platforms)
    for platform in platforms:
        File.from_upload(upload, v, platform, parse_data=data,
                         is_beta=is_beta)
    v.disable_old_files()
    # After the upload has been copied to all platforms, remove the upload.
    storage.delete(upload.path)
    if send_signal:
        version_uploaded.send(sender=v)
    # Track the time it took from first upload through validation
    # (and whatever else) until a version was created.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    upload_time = now_ts - upload_start
    log.info('Time for version {version} creation from upload: {delta}; '
             'created={created}; now={now}'
             .format(delta=upload_time, version=v, created=upload.created,
                     now=now))
    statsd.timing('devhub.version_created_from_upload', upload_time)
    return v
def from_upload(cls, upload, addon, platforms, send_signal=True,
                source=None, is_beta=False):
    """Build a new version of ``addon`` from ``upload``.

    Parses the upload, creates the Version row plus its per-platform
    File rows, removes the upload from storage once copied, optionally
    fires the ``version_uploaded`` signal, and records timing metrics.
    """
    parsed = utils.parse_addon(upload, addon)

    # Inherit the license from the most recent version, when one exists.
    try:
        carried_license = addon.versions.latest().license_id
    except Version.DoesNotExist:
        carried_license = None

    # Clamp the developer name to the model field's max length.
    name_limit = cls._meta.get_field_by_name('_developer_name')[0].max_length
    developer_name = parsed.get('developer_name', '')[:name_limit]

    new_version = cls.objects.create(
        addon=addon,
        version=parsed['version'],
        license_id=carried_license,
        _developer_name=developer_name,
        source=source,
    )
    log.info('New version: %r (%s) from %r' % (new_version, new_version.id,
                                               upload))

    # Record the supported application version ranges.
    for app in parsed.get('apps', []):
        ApplicationsVersions(version=new_version, min=app.min, max=app.max,
                             application=app.id).save()

    if addon.type == amo.ADDON_SEARCH:
        # Search extensions are always for all platforms.
        platforms = [amo.PLATFORM_ALL.id]
    else:
        platforms = cls._make_safe_platform_files(platforms)

    for platform in platforms:
        File.from_upload(upload, new_version, platform, parse_data=parsed,
                         is_beta=is_beta)

    new_version.disable_old_files()
    # After the upload has been copied to all platforms, remove the upload.
    storage.delete(upload.path)

    if send_signal:
        version_uploaded.send(sender=new_version)

    # Track the time it took from first upload through validation
    # (and whatever else) until a version was created.
    start_ms = utc_millesecs_from_epoch(upload.created)
    finished_at = datetime.datetime.now()
    elapsed_ms = utc_millesecs_from_epoch(finished_at) - start_ms
    log.info('Time for version {version} creation from upload: {delta}; '
             'created={created}; now={now}'
             .format(delta=elapsed_ms, version=new_version,
                     created=upload.created, now=finished_at))
    statsd.timing('devhub.version_created_from_upload', elapsed_ms)
    return new_version
def test_track_upload_validation_results_time(self):
    """The validation task should emit a statsd timing roughly equal
    to the elapsed time since the upload was created."""
    # Set created time back (just for sanity) otherwise the delta
    # would be in the microsecond range.
    self.upload.update(created=datetime.now() - timedelta(days=1))

    results = amo.VALIDATOR_SKELETON_RESULTS.copy()
    with mock.patch("devhub.tasks.statsd.timing") as timing_mock:
        tasks.handle_upload_validation_result(results, self.upload.pk)

    assert self.get_upload().validation

    started = utc_millesecs_from_epoch(self.upload.created)
    rough_delta = utc_millesecs_from_epoch() - started
    recorded_delta = timing_mock.call_args[0][1]
    fuzz = 2000  # 2 seconds
    assert (rough_delta - fuzz) <= recorded_delta <= (rough_delta + fuzz)
def test_track_run_time(self):
    """Running the fake task should record its run time in statsd and
    clean up the cached start-time key."""
    one_minute_back = datetime.datetime.now() - timedelta(minutes=1)
    started_ms = utc_millesecs_from_epoch(one_minute_back)
    self.cache.get.return_value = started_ms

    fake_task.delay()

    expected_ms = utc_millesecs_from_epoch() - started_ms
    timing_args = self.statsd.timing.call_args[0]
    assert timing_args[0] == 'tasks.apps.amo.tests.test_celery.fake_task'

    recorded_ms = timing_args[1]
    fuzz = 2000  # 2 seconds
    assert (expected_ms - fuzz) <= recorded_ms <= (expected_ms + fuzz)

    assert self.cache.get.call_args[0][0].startswith('task_start_time')
    assert self.cache.delete.call_args[0][0].startswith('task_start_time')
def handle_upload_validation_result(results, upload_pk, annotate=True):
    """Persist validation ``results`` onto the FileUpload identified by
    ``upload_pk`` (annotating them first unless ``annotate`` is false)
    and emit timing metrics, including a per-megabyte scaled timing."""
    if annotate:
        results = annotate_validation_results(results)

    upload = FileUpload.objects.get(pk=upload_pk)
    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    started_ms = utc_millesecs_from_epoch(upload.created)
    finished_at = datetime.datetime.now()
    elapsed_ms = utc_millesecs_from_epoch(finished_at) - started_ms
    statsd.timing("devhub.validation_results_processed", elapsed_ms)

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = "over" if size > megabyte else "under"
    statsd.timing(
        "devhub.validation_results_processed_{}_1mb".format(quantifier),
        elapsed_ms)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        divisor = size_in_mb if size > megabyte else Decimal(1)
        scaled = Decimal(elapsed_ms) / divisor
        statsd.timing("devhub.validation_results_processed_per_mb", scaled)

    log.info(
        "Time to process and save upload validation; "
        "upload.pk={upload}; processing_time={delta}; "
        "scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; "
        "created={created}; now={now}".format(
            delta=elapsed_ms, upload=upload.pk, created=upload.created,
            now=finished_at, scaled=scaled, size_in_mb=size_in_mb))
def handle_upload_validation_result(results, upload_pk, annotate=True):
    """Persist validation ``results`` onto the FileUpload identified by
    ``upload_pk`` (annotating them first unless ``annotate`` is false)
    and emit a timing metric for the whole pipeline."""
    if annotate:
        results = annotate_validation_results(results)

    upload = FileUpload.objects.get(pk=upload_pk)
    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    started_ms = utc_millesecs_from_epoch(upload.created)
    finished_at = datetime.datetime.now()
    elapsed_ms = utc_millesecs_from_epoch(finished_at) - started_ms
    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing time={delta}; '
             'created={created}; now={now}'
             .format(delta=elapsed_ms, upload=upload.pk,
                     created=upload.created, now=finished_at))
    statsd.timing('devhub.validation_results_processed', elapsed_ms)
def __init__(self):
    # Capture a single wall-clock "now" and its epoch-milliseconds
    # representation so both attributes refer to the same instant.
    self.current_datetime = datetime.datetime.now()
    self.current_epoch_ms = utc_millesecs_from_epoch(
        self.current_datetime)
def __init__(self):
    # Snapshot one instant in both datetime and epoch-millisecond form
    # so the two attributes always agree.
    now = datetime.datetime.now()
    self.current_datetime = now
    self.current_epoch_ms = utc_millesecs_from_epoch(now)
def approximate_upload_time(self):
    """Return the milliseconds elapsed since ``self.upload`` was created."""
    started_ms = utc_millesecs_from_epoch(self.upload.created)
    return utc_millesecs_from_epoch() - started_ms