def test_limit_validator_warnings(self):
    """A validation result with many warnings is truncated to 11
    messages, and the final message notes the truncation."""
    warning = {
        "context": ["<code>", None],
        "description": ["Something something, see "
                        "https://bugzilla.mozilla.org/"],
        "column": 0,
        "line": 1,
        "file": "chrome/content/down.html",
        "tier": 2,
        "message": "Some warning",
        "type": "warning",
        "id": [],
        "uid": "bb9948b604b111e09dfdc42c0301fe38",
    }
    data = {
        "errors": 0,
        "success": True,
        "warnings": 500,
        "notices": 0,
        "message_tree": {},
        "messages": [warning] * 12,
        "metadata": {},
    }
    upload = FileUpload(validation=json.dumps(data))
    validation = upload.escaped_validation()
    eq_(len(validation['messages']), 11)
    assert 'truncated' in validation['messages'][-1]['message']
    eq_(validation['messages'][-1]['type'], 'warning')
def test_escaped_validation_ignores_bad_json(self):
    """Unparseable JSON in `validation` must not blow up; the parse
    failure is recorded on `task_error` instead."""
    upload = FileUpload(validation='wtf')
    assert not upload._escaped_validation
    upload.save()
    assert not upload._escaped_validation
    last_line = upload.task_error.strip().split('\n')[-1]
    eq_(last_line, 'ValueError: No JSON object could be decoded')
def test_escaped_validation_will_escape_validation(self):
    """escaped_validation() stores the escaped messages plus a default
    `ending_tier` on `_escaped_validation`."""
    upload = FileUpload(validation='{"messages": [{"the": "validation"}]}')
    assert not upload._escaped_validation
    upload.escaped_validation()
    expected = {"ending_tier": 0, "messages": [{"the": "validation"}]}
    eq_(json.loads(upload._escaped_validation), expected)
def upload(self, addon, path):
    """Create a FileUpload instance from a local file.

    `addon` supplies the owning user (its first author); `path` points
    at the package on disk.  Returns the new FileUpload.
    """
    self.info('Creating FileUpload...')
    package_size = os.stat(path).st_size
    # Fixed: the old code leaked an open file handle.  Read in binary
    # mode (this is a zip) and close the handle promptly.
    with open(path, 'rb') as package_file:
        package_data = package_file.read()
    upload = FileUpload()
    upload.user = addon.authors.all()[0]
    upload.add_file(package_data, 'marketplace-package.zip',
                    package_size, is_webapp=True)
    self.info('Created FileUpload %s.' % upload)
    return upload
def test_ascii_names(self):
    # Non-ASCII filenames must still yield a usable name containing
    # the xpi extension.
    filenames = [
        u"jétpack.xpi",
        u"мозила_србија-0.11-fx.xpi",
        u"フォクすけといっしょ.xpi",
        u"\u05d0\u05d5\u05e1\u05e3.xpi",
    ]
    for filename in filenames:
        fu = FileUpload.from_post("", filename, 0)
        assert "xpi" in fu.name
def test_ascii_names(self):
    # Non-ASCII filenames must still yield a usable name containing
    # the zip extension.
    for filename in (u'mözball.zip',
                     u'мозила_србија-0.11.zip',
                     u'フォクすけといっしょ.zip',
                     u'\u05d0\u05d5\u05e1\u05e3.zip'):
        fu = FileUpload.from_post('', filename, 0)
        assert 'zip' in fu.name
def test_ascii_names(self):
    # Each non-ASCII filename should still produce a name that keeps
    # the xpi extension.
    cases = (u'jétpack.xpi',
             u'мозила_србија-0.11-fx.xpi',
             u'フォクすけといっしょ.xpi',
             u'\u05d0\u05d5\u05e1\u05e3.xpi')
    for case in cases:
        upload = FileUpload.from_post('', case, 0)
        assert 'xpi' in upload.name
def test_limit_validator_compat_errors(self):
    """Compatibility results are truncated to 11 messages like regular
    ones; in compatibility mode the truncation message takes its type
    from the compatibility summary (error here)."""
    base = {
        "context": ["<code>", None],
        "description": ["Something something, see "
                        "https://bugzilla.mozilla.org/"],
        "column": 0,
        "line": 1,
        "file": "chrome/content/down.html",
        "tier": 2,
        "type": "warning",
        "compatibility_type": "warning",
        "id": [],
        "uid": "bb9948b604b111e09dfdc42c0301fe38",
    }
    warning = dict(base, message="Some warning")
    error = dict(base, message="Some error")
    data = {
        "errors": 0,
        "success": True,
        "warnings": 100,
        "notices": 0,
        "message_tree": {},
        "compatibility_summary": {"errors": 100, "warnings": 0,
                                  "notices": 0},
        "messages": [warning, error] * 50,
        "metadata": {},
    }
    upload = FileUpload(validation=json.dumps(data))

    validation = upload.escaped_validation()
    eq_(len(validation["messages"]), 11)
    assert "truncated" in validation["messages"][-1]["message"]
    eq_(validation["messages"][-1]["type"], "warning")

    validation = upload.escaped_validation(is_compatibility=True)
    eq_(len(validation["messages"]), 11)
    assert "truncated" in validation["messages"][-1]["message"]
    eq_(validation["messages"][-1]["type"], "error")
def get_upload(self, filename=None, abspath=None, validation=None):
    """Build and save a FileUpload from a fixture file.

    Uses `abspath` when given, otherwise resolves `filename` via
    `self.file_path()`.  `validation` overrides the default fake
    validator output.
    """
    path = abspath if abspath else self.file_path(filename)
    # Fixed: the old code leaked an open file handle.
    with open(path) as fh:
        xpi = fh.read()
    upload = FileUpload.from_post([xpi], filename=abspath or filename,
                                  size=1234)
    # Simulate what fetch_manifest() does after uploading an app.
    upload.validation = validation or json.dumps(
        dict(errors=0, warnings=1, notices=2, metadata={}, messages=[]))
    upload.save()
    return upload
def clean_upload(self):
    """Accept the upload if it fits under `max_size`; otherwise persist
    a size error as a FileUpload and mark the form invalid."""
    upload = self.cleaned_data['upload']
    if upload.size <= self.max_size:
        self.file_upload = FileUpload.from_post(
            upload, upload.name, upload.size, is_webapp=True)
        self.file_upload.user = self.user
        self.file_upload.save()
        return
    msg = 'Packaged app too large for submission.'
    detail = ('Packages must be less than %s.' %
              filesizeformat(self.max_size))
    big = json.dumps({
        'errors': 1,
        'success': False,
        'messages': [{'type': 'error',
                      'message': [msg, detail],
                      'tier': 1}],
    })
    # Persist the error with this into FileUpload, but do not persist
    # the file contents, which are too large.
    self.file_upload = FileUpload.objects.create(
        is_webapp=True, user=self.user, validation=big)
    # Raise an error so the form is invalid.
    raise forms.ValidationError(msg)
def clean_upload(self):
    """Validate the uploaded package size and create a FileUpload."""
    upload = self.cleaned_data['upload']
    if upload.size > self.max_size:
        msg = 'Packaged app too large for submission.'
        size_note = ('Packages must be less than %s.' %
                     filesizeformat(self.max_size))
        error_doc = {
            'errors': 1,
            'success': False,
            'messages': [{'type': 'error',
                          'message': [msg, size_note],
                          'tier': 1}],
        }
        # Persist the error into a FileUpload, but not the file
        # contents, which are too large.
        self.file_upload = FileUpload.objects.create(
            is_webapp=True, user=self.user,
            validation=json.dumps(error_doc))
        # Raise so the form is invalid.
        raise forms.ValidationError(msg)
    self.file_upload = FileUpload.from_post(
        upload, upload.name, upload.size, is_webapp=True)
    self.file_upload.user = self.user
    self.file_upload.save()
def clean_upload(self):
    """Reject oversized packages (persisting the error), otherwise
    create and save the FileUpload for this submission."""
    upload = self.cleaned_data["upload"]
    if upload.size > self.max_size:
        msg = "Packaged app too large for submission."
        messages = [{
            "type": "error",
            "message": [msg,
                        "Packages must be less than %s."
                        % filesizeformat(self.max_size)],
            "tier": 1,
        }]
        big = json.dumps({"errors": 1,
                          "success": False,
                          "messages": messages})
        # Persist the error with this into FileUpload, but do not
        # persist the file contents, which are too large.
        self.file_upload = FileUpload.objects.create(
            is_webapp=True, user=self.user, validation=big)
        # Raise an error so the form is invalid.
        raise forms.ValidationError(msg)
    self.file_upload = FileUpload.from_post(
        upload, upload.name, upload.size, is_webapp=True)
    self.file_upload.user = self.user
    self.file_upload.save()
def get_upload(self, filename, validation=None):
    """Build and save a FileUpload from the named fixture file.

    `validation` overrides the default fake validator output.
    """
    # Fixed: the old code leaked an open file handle.
    with open(self.file_path(filename)) as fh:
        xpi = fh.read()
    upload = FileUpload.from_post([xpi], filename=filename, size=1234)
    upload.validation = validation or json.dumps(
        dict(errors=0, warnings=1, notices=2, metadata={}))
    upload.save()
    return upload
def get_upload(self, filename, validation=None):
    """Build and save a FileUpload from the named fixture file.

    `validation` overrides the default fake validator output.
    """
    # Fixed: the old code leaked an open file handle.
    with open(self.file_path(filename)) as fh:
        xpi = fh.read()
    upload = FileUpload.from_post([xpi], filename=filename, size=1234)
    upload.validation = validation or json.dumps(
        dict(errors=0, warnings=1, notices=2))
    upload.save()
    return upload
def get_upload(self, filename=None, abspath=None, validation=None):
    """Build and save a FileUpload from a fixture file.

    Uses `abspath` when given, otherwise resolves `filename` via
    `self.file_path()`.  `validation` overrides the default fake
    validator output.
    """
    path = abspath if abspath else self.file_path(filename)
    # Fixed: the old code leaked an open file handle.
    with open(path) as fh:
        xpi = fh.read()
    upload = FileUpload.from_post([xpi], filename=abspath or filename,
                                  size=1234)
    upload.validation = validation or json.dumps(
        dict(errors=0, warnings=1, notices=2, metadata={}, messages=[]))
    upload.save()
    return upload
def get_upload(self, filename=None, abspath=None, validation=None):
    """Build and save a FileUpload from a fixture file.

    Uses `abspath` when given, otherwise resolves `filename` via
    `self.file_path()`.  `validation` overrides the default fake
    validator output.
    """
    path = abspath if abspath else self.file_path(filename)
    # Fixed: the old code leaked an open file handle.
    with open(path) as fh:
        xpi = fh.read()
    upload = FileUpload.from_post([xpi], filename=abspath or filename,
                                  size=1234)
    # Simulate what fetch_manifest() does after uploading an app.
    upload.validation = validation or json.dumps(
        dict(errors=0, warnings=1, notices=2, metadata={}, messages=[]))
    upload.save()
    return upload
def upload(request):
    """Render the upload page, or on POST create a FileUpload, kick
    off validation, and redirect to its JSON detail view."""
    if request.method != 'POST':
        return jingo.render(request, 'devhub/upload.html')
    filedata = request.FILES['upload']
    fu = FileUpload.from_post(filedata, filedata.name, filedata.size)
    if request.user.is_authenticated():
        fu.user = request.amo_user
        fu.save()
    tasks.validator.delay(fu.pk)
    return redirect('devhub.upload_detail', fu.pk, 'json')
def sniff(self, file_upload: FileUpload,
          encoding: str = settings.DEFAULT_CHARSET,
          limit: int = 5) -> SniffResult:
    """Inspect an uploaded CSV file.

    Detects whether the file has a header row and what dialect
    (delimiter etc.) it uses, previews up to `limit` data rows, and
    guesses a mapping from serializer field names to column indexes by
    fuzzy-matching header names.

    Raises ParsingException when decoding or CSV sniffing fails.
    """
    try:
        with file_upload.open() as csv_file:
            # Sniff header presence and dialect from the first 1KB,
            # rewinding between passes so the reader starts at byte 0.
            has_header = unicodecsv.Sniffer().has_header(
                csv_file.read(1024).decode(encoding))
            csv_file.seek(0)
            dialect = unicodecsv.Sniffer().sniff(
                csv_file.read(1024).decode(encoding))
            csv_format_opts = dict(dialect=dialect, )
            csv_file.seek(0)
            reader = unicodecsv.reader(csv_file, **csv_format_opts)
            if has_header:
                header = next(reader)
            else:
                header = None
            # Preview rows only; non-positive limits yield no preview.
            rows = list(islice(reader, max(0, limit))) if limit > 0 else []
    except (UnicodeDecodeError, unicodecsv.Error) as e:
        raise ParsingException(str(e)) from e
    # Writable serializer fields are the candidate import targets.
    contact_serializer = self.get_contact_serializer(data={})
    fields = {
        name: field
        for name, field in contact_serializer.get_fields().items()
        if not field.read_only
    }
    headers_mapping = {}
    if header:
        for num, name in enumerate(header):
            # Best fuzzy match (n=1) between a CSV header and a field
            # name; unmatched headers are simply skipped.
            field_names = difflib.get_close_matches(name, fields.keys(),
                                                    n=1)
            if field_names:
                fields_name = field_names[0]
                headers_mapping[fields_name] = num
    return SniffResult(
        dict(
            has_header=has_header,
            delimiter=dialect.delimiter,
        ),
        list(fields.keys()),
        rows,
        headers_mapping,
    )
def manifest_updated(self, manifest):
    """The manifest has updated: create a version and file from it."""
    with open(manifest) as fh:
        contents = fh.read()
    # Only build a FileUpload once we know the manifest changed,
    # otherwise we'd be creating an awful lot of these.
    upload = FileUpload.from_post(contents, manifest, len(contents))
    # Version.from_upload does most of the heavy lifting.
    platform = Platform.objects.get(id=amo.PLATFORM_ALL.id)
    Version.from_upload(upload, self, [platform])
    # Triggering this ensures that the current_version gets updated.
    self.update_version()
    amo.log(amo.LOG.MANIFEST_UPDATED, self)
def upload(request, addon_slug=None, is_standalone=False):
    """Create a packaged-app FileUpload from the request, queue
    validation, and redirect to the appropriate detail view."""
    filedata = request.FILES["upload"]
    file_upload = FileUpload.from_post(filedata, filedata.name,
                                       filedata.size, is_webapp=True)
    log.info("Packaged App FileUpload created: %s" % file_upload.pk)
    if request.user.is_authenticated():
        file_upload.user = request.amo_user
        file_upload.save()
    tasks.validator.delay(file_upload.pk)
    if addon_slug:
        return redirect("mkt.developers.upload_detail_for_addon",
                        addon_slug, file_upload.pk)
    if is_standalone:
        return redirect("mkt.developers.standalone_upload_detail",
                        file_upload.pk)
    return redirect("mkt.developers.upload_detail", file_upload.pk, "json")
def upload(request):
    """Handle a raw-body file upload described by X-File-* headers;
    non-POST requests get the upload page."""
    if request.method != 'POST':
        return jingo.render(request, 'devhub/upload.html')
    # TODO(gkoberger): Bug 610800 - Don't load uploads into memory.
    filedata = request.raw_post_data
    meta = request.META
    # Both headers are required to describe the upload.
    if 'HTTP_X_FILE_NAME' not in meta or 'HTTP_X_FILE_SIZE' not in meta:
        return http.HttpResponseBadRequest()
    filename = meta['HTTP_X_FILE_NAME']
    size = meta['HTTP_X_FILE_SIZE']
    fu = FileUpload.from_post([filedata], filename, size)
    if request.user.is_authenticated():
        fu.user = request.amo_user
        fu.save()
    tasks.validator.delay(fu.pk)
    return redirect('devhub.upload_detail', fu.pk, 'json')
def upload(request, addon_slug=None, is_standalone=False):
    """Create a FileUpload, queue either a compatibility check (when
    app/version ids are posted) or plain validation, then redirect."""
    filedata = request.FILES["upload"]
    file_upload = FileUpload.from_post(filedata, filedata.name,
                                       filedata.size)
    log.info("FileUpload created: %s" % file_upload.pk)
    if request.user.is_authenticated():
        file_upload.user = request.amo_user
        file_upload.save()
    app_id = request.POST.get("app_id")
    version_id = request.POST.get("version_id")
    if app_id and version_id:
        app = get_object_or_404(Application, pk=app_id)
        ver = get_object_or_404(AppVersion, pk=version_id)
        tasks.compatibility_check.delay(file_upload.pk, app.guid,
                                        ver.version)
    else:
        tasks.validator.delay(file_upload.pk)
    if addon_slug:
        return redirect("mkt.developers.upload_detail_for_addon",
                        addon_slug, file_upload.pk)
    if is_standalone:
        return redirect("mkt.developers.standalone_upload_detail",
                        file_upload.pk)
    return redirect("mkt.developers.upload_detail", file_upload.pk, "json")
def upload(request, addon_slug=None, is_standalone=False):
    """Create a FileUpload and dispatch to the compatibility checker or
    the regular validator, then redirect to the right detail view."""
    filedata = request.FILES['upload']
    fu = FileUpload.from_post(filedata, filedata.name, filedata.size)
    log.info('FileUpload created: %s' % fu.pk)
    if request.user.is_authenticated():
        fu.user = request.amo_user
        fu.save()
    post = request.POST
    if post.get('app_id') and post.get('version_id'):
        # A compatibility check replaces the regular validation run.
        app = get_object_or_404(Application, pk=post['app_id'])
        ver = get_object_or_404(AppVersion, pk=post['version_id'])
        tasks.compatibility_check.delay(fu.pk, app.guid, ver.version)
    else:
        tasks.validator.delay(fu.pk)
    if addon_slug:
        return redirect('mkt.developers.upload_detail_for_addon',
                        addon_slug, fu.pk)
    elif is_standalone:
        return redirect('mkt.developers.standalone_upload_detail', fu.pk)
    else:
        return redirect('mkt.developers.upload_detail', fu.pk, 'json')
def test_escaped_validation_will_escape_validation(self):
    """escaped_validation() stores the escaped messages plus a default
    `ending_tier` on `_escaped_validation`."""
    upload = FileUpload(validation='{"messages": [{"the": "validation"}]}')
    assert not upload._escaped_validation
    upload.escaped_validation()
    # Fixed: compare parsed JSON rather than the raw string -- dict key
    # order in json.dumps output is an implementation detail, so the
    # exact-string assertion was brittle (and inconsistent with the
    # sibling test that already compares via json.loads).
    eq_(json.loads(upload._escaped_validation),
        {"ending_tier": 0, "messages": [{"the": "validation"}]})
class NewPackagedAppForm(happyforms.Form):
    """Upload form for packaged apps.

    Validates package size, zip integrity, manifest contents and app
    origin; on success creates `self.file_upload`, otherwise persists
    the errors as a FileUpload and raises ValidationError.
    """
    upload = forms.FileField()

    def __init__(self, *args, **kwargs):
        self.max_size = kwargs.pop('max_size', MAX_PACKAGED_APP_SIZE)
        self.user = kwargs.pop('user', get_user())
        self.addon = kwargs.pop('addon', None)
        self.file_upload = None
        super(NewPackagedAppForm, self).__init__(*args, **kwargs)

    def clean_upload(self):
        upload = self.cleaned_data['upload']
        errors = []

        if upload.size > self.max_size:
            errors.append({
                'type': 'error',
                'message': _('Packaged app too large for submission. '
                             'Packages must be less than %s.' %
                             filesizeformat(self.max_size)),
                'tier': 1,
            })

        manifest = None
        try:
            # Be careful to keep this as in-memory zip reading.
            manifest = ZipFile(upload, 'r').read('manifest.webapp')
        except Exception:
            errors.append({
                'type': 'error',
                'message': _('Error extracting manifest from zip file.'),
                'tier': 1,
            })

        origin = None
        if manifest:
            try:
                origin = WebAppParser.decode_manifest(manifest).get('origin')
            except forms.ValidationError as e:
                errors.append({
                    'type': 'error',
                    'message': ''.join(e.messages),
                    'tier': 1,
                })

        if origin:
            try:
                verify_app_domain(origin, packaged=True, exclude=self.addon)
            # Fixed: Python 2-only `except X, e` comma syntax, now
            # consistent with the other handlers in this method.
            except forms.ValidationError as e:
                errors.append({
                    'type': 'error',
                    'message': ''.join(e.messages),
                    'tier': 1,
                })

        if errors:
            # Persist the errors with this FileUpload, but do not
            # persist the file contents, which are too large.
            validation = {
                'errors': len(errors),
                'success': False,
                'messages': errors,
            }
            self.file_upload = FileUpload.objects.create(
                is_webapp=True, user=self.user,
                name=getattr(upload, 'name', ''),
                validation=json.dumps(validation))
            # Raise an error so the form is invalid.
            # Fixed: the old code did ' '.join([...][0]), which joined
            # the *characters* of the first message string; join the
            # messages themselves instead.
            raise forms.ValidationError(
                ' '.join(e['message'] for e in errors))

        # Everything passed validation.
        self.file_upload = FileUpload.from_post(
            upload, upload.name, upload.size, is_webapp=True)
        self.file_upload.user = self.user
        self.file_upload.save()
def upload(self):
    """Build a FileUpload from `self.data` fed in three-char chunks,
    the shape from_post expects."""
    chunked = amo.utils.chunked(self.data, 3)
    data = [''.join(chunk) for chunk in chunked]
    return FileUpload.from_post(data, 'filename.xpi', len(self.data))
def test_file_upload_passed_all_validations_invalid():
    # An upload flagged invalid does not pass, even with a complete
    # skeleton validation result attached.
    validation = json.dumps(amo.VALIDATOR_SKELETON_RESULTS)
    upload = FileUpload(valid=False, validation=validation)
    assert not upload.passed_all_validations
            # (Fragment: the enclosing `try:` and loop header are
            # outside this view.)  Download the language pack in
            # chunks, enforcing the size cap ourselves.
            chunks = []
            size = 0
            for chunk in req.iter_content(settings.LANGPACK_MAX_SIZE):
                size += len(chunk)
                # `requests` doesn't respect the Content-Length header
                # so we need to check twice.
                if size > settings.LANGPACK_MAX_SIZE:
                    # NOTE(review): "to big" is a typo for "too big" --
                    # left unchanged since it is a runtime string.
                    raise Exception('Response to big')
                chunks.append(chunk)
        # Python 2 `except X, e` syntax: any fetch failure is logged
        # and the language pack is skipped.
        except Exception, e:
            log.error('[@None] Error fetching "%s" language pack: %s'
                      % (xpi, e))
            continue
        upload = FileUpload()
        upload.add_file(chunks, xpi, size)
        # Activate the correct locale for the language pack so it
        # will be used as the add-on's default locale if available.
        translation.activate(lang)
        # NOTE(review): this guid looks redacted; a '%' format with no
        # conversion specifier in the string would raise TypeError at
        # runtime -- verify against the original source.
        guid = '*****@*****.**' % lang
        try:
            addon = Addon.objects.get(guid=guid)
        except Addon.DoesNotExist:
            addon = None
        try:
            # (Fragment ends inside this try block.)
            data = parse_addon(upload, addon)
def test_file_upload_passed_all_validations_processing():
    # An empty validation string means the upload is still being
    # processed, so it has not passed yet.
    upload = FileUpload(valid=False, validation='')
    assert not upload.passed_all_validations
class NewPackagedAppForm(happyforms.Form):
    """Upload form for packaged apps.

    Validates package size, zip integrity, manifest contents and app
    origin; on success creates `self.file_upload`, otherwise persists
    the errors via `self.persist_errors` (defined elsewhere) and
    raises the ValidationError it returns.
    """
    upload = forms.FileField()

    def __init__(self, *args, **kwargs):
        self.max_size = kwargs.pop('max_size', MAX_PACKAGED_APP_SIZE)
        self.user = kwargs.pop('user', get_user())
        self.addon = kwargs.pop('addon', None)
        self.file_upload = None
        super(NewPackagedAppForm, self).__init__(*args, **kwargs)

    def clean_upload(self):
        upload = self.cleaned_data['upload']
        errors = []

        if upload.size > self.max_size:
            errors.append({
                'type': 'error',
                'message': _('Packaged app too large for submission. '
                             'Packages must be smaller than %s.' %
                             filesizeformat(self.max_size)),
                'tier': 1,
            })
            # Immediately raise an error, do not process the rest of
            # the view, which would read the file.
            raise self.persist_errors(errors, upload)

        manifest = None
        try:
            # Be careful to keep this as in-memory zip reading.
            manifest = ZipFile(upload, 'r').read('manifest.webapp')
        except Exception:
            errors.append({
                'type': 'error',
                'message': _('Error extracting manifest from zip file.'),
                'tier': 1,
            })

        origin = None
        if manifest:
            try:
                origin = WebAppParser.decode_manifest(manifest).get('origin')
            except forms.ValidationError as e:
                errors.append({
                    'type': 'error',
                    'message': ''.join(e.messages),
                    'tier': 1,
                })

        if origin:
            try:
                verify_app_domain(origin, packaged=True, exclude=self.addon)
            # Fixed: Python 2-only `except X, e` comma syntax, now
            # consistent with the other handlers in this method.
            except forms.ValidationError as e:
                errors.append({
                    'type': 'error',
                    'message': ''.join(e.messages),
                    'tier': 1,
                })

        if errors:
            raise self.persist_errors(errors, upload)

        # Everything passed validation.
        self.file_upload = FileUpload.from_post(upload, upload.name,
                                                upload.size, is_webapp=True)
        self.file_upload.user = self.user
        self.file_upload.save()
def test_file_upload_passed_auto_validation_failed():
    # A validation result that explicitly failed auto-validation.
    validation = {'passed_auto_validation': False}
    upload = FileUpload(validation=json.dumps(validation))
    assert not upload.passed_auto_validation
def test_limit_validator_compat_errors(self):
    """Truncation of oversized compat results: a plain upload keeps the
    truncation message typed 'warning'; with compat_with_app set, the
    compatibility summary makes it an 'error'."""
    base = {
        "context": ["<code>", None],
        "description": ["Something something, see "
                        "https://bugzilla.mozilla.org/"],
        "column": 0,
        "line": 1,
        "file": "chrome/content/down.html",
        "tier": 2,
        "type": "warning",
        "compatibility_type": "warning",
        "id": [],
        "uid": "bb9948b604b111e09dfdc42c0301fe38",
    }
    data = {
        "errors": 0,
        "success": True,
        "warnings": 100,
        "notices": 0,
        "message_tree": {},
        "compatibility_summary": {"errors": 100,
                                  "warnings": 0,
                                  "notices": 0},
        "messages": [dict(base, message="Some warning"),
                     dict(base, message="Some error")] * 50,
        "metadata": {},
    }
    upload = FileUpload(validation=json.dumps(data))
    validation = upload.processed_validation
    assert len(validation['messages']) == 11
    assert 'truncated' in validation['messages'][0]['message']
    assert validation['messages'][0]['type'] == 'warning'

    upload = FileUpload(validation=json.dumps(data), compat_with_app=1)
    validation = upload.processed_validation
    assert len(validation['messages']) == 11
    assert 'truncated' in validation['messages'][0]['message']
    assert validation['messages'][0]['type'] == 'error'
def parse_and_import(
        self,
        file_upload: FileUpload,
        headers: Dict[str, int],
        has_headers: Optional[bool] = None,
        # todo: maybe it is better to accept dialect to give more
        # options to configure
        delimiter: Optional[str] = None,
        encoding: str = settings.DEFAULT_CHARSET,
        allow_update: bool = True,
        atomic: bool = False,
        create_failed_rows_file: bool = False,
        detailed_errors_limit: int = 20,
        campaign: Optional[Campaign] = None,
        contact_list: Optional[ContactList] = None) -> ImportResult:
    """Parse an uploaded CSV and import its rows as contacts.

    `headers` maps field name -> column index.  Header presence and
    delimiter are sniffed when not given.  Optionally writes failed
    rows to a new FileUpload, adds imported contacts to `campaign`
    and/or `contact_list`.  Raises ParsingException on decode/CSV
    errors.
    """
    # Invert to column index -> field name for row processing.
    indexes = {index: header for header, index in headers.items()}
    with file_upload.open() as csv_file:
        csv_format_opts = dict(
            dialect=unicodecsv.excel,
            encoding=encoding,
        )
        try:
            # Sniff header presence / dialect from the first 1KB,
            # rewinding afterwards so the reader starts at byte 0.
            if has_headers is None:
                has_headers = unicodecsv.Sniffer().has_header(
                    csv_file.read(1024).decode(encoding))
                csv_file.seek(0)
            if delimiter is None:
                dialect = unicodecsv.Sniffer().sniff(
                    csv_file.read(1024).decode(encoding))
                csv_format_opts['dialect'] = dialect
                csv_file.seek(0)
            else:
                csv_format_opts['delimiter'] = delimiter
            csv_reader = unicodecsv.reader(csv_file, **csv_format_opts)
            header = next(csv_reader) if has_headers else None
            # Bind everything but the failed-row callback up front.
            process_rows = partial(self._process_rows, csv_reader,
                                   indexes, allow_update, atomic,
                                   detailed_errors_limit)
        except (UnicodeDecodeError, unicodecsv.Error) as e:
            raise ParsingException(str(e)) from e
        failed_rows_file_upload = None
        with transaction.atomic(savepoint=False):
            if not create_failed_rows_file:
                created_contacts, updated_contacts, skipped_contacts, errors = process_rows(
                    None)
            else:
                # Failed rows are echoed into a temp file (same CSV
                # format, original header preserved) and persisted as
                # a short-lived FileUpload only when errors occurred.
                with tempfile.TemporaryFile() as fp, transaction.atomic(
                        savepoint=False):
                    csv_writer = unicodecsv.writer(fp, **csv_format_opts)
                    if header:
                        csv_writer.writerow(header)
                    created_contacts, updated_contacts, skipped_contacts, errors = process_rows(
                        csv_writer.writerow)
                    if errors:
                        fp.seek(0)
                        failed_rows_file_upload = FileUpload.objects.create(
                            owner=file_upload.owner,
                            uploader=FileUploader.SYSTEM,
                            ttl=datetime.timedelta(days=2),
                            file=File(
                                fp,
                                "failed-rows-from-%s" % file_upload.name))
            if campaign:
                # Attach new contacts, plus updated ones not already
                # participating, to the campaign.
                participating = set(
                    campaign.contacts.values_list('id', flat=True))
                Participation.objects.bulk_create((Participation(
                    contact_id=contact_id,
                    campaign=campaign,
                ) for contact_id in chain(
                    created_contacts,
                    filter(
                        lambda contact_id: contact_id not in participating,
                        updated_contacts))))
            if contact_list:
                contact_list.contacts.add(*created_contacts)
                contact_list.contacts.add(*updated_contacts)
    return ImportResult(len(created_contacts), len(updated_contacts),
                        len(skipped_contacts), errors,
                        failed_rows_file_upload)
def test_file_upload_passed_auto_validation_passed():
    # A validation result that explicitly passed auto-validation.
    validation = {'passed_auto_validation': True}
    upload = FileUpload(validation=json.dumps(validation))
    assert upload.passed_auto_validation
def upload(self):
    """Build a FileUpload from `self.data`, pre-chunked as from_post
    expects."""
    chunks = amo.utils.chunked(self.data, 3)
    return FileUpload.from_post(list(chunks), 'filename.xpi',
                                len(self.data))