Example #1
    @classmethod
    def from_upload(cls, upload, version, parse_data=None):
        upload.path = smart_path(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]

        f = cls(version=version)
        f.filename = f.generate_filename(extension=ext or '.zip')
        f.size = private_storage.size(upload.path)  # Size in bytes.
        f.status = mkt.STATUS_PENDING
        # Re-use the hash computed at upload time when we have one, so we
        # don't have to regenerate it from the file contents.
        f.hash = upload.hash or f.generate_hash(upload.path)
        f.save()

        log.debug('New file: %r from %r' % (f, upload))

        # Copy the uploaded file from its temporary location to its final
        # path under the version's prefix.
        copy_stored_file(
            upload.path,
            os.path.join(version.path_prefix, nfd_str(f.filename)),
            src_storage=private_storage,
            dst_storage=private_storage)

        if upload.validation:
            FileValidation.from_json(f, upload.validation)

        return f
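For readers outside the mkt codebase, here is a minimal, self-contained sketch of the same factory pattern with hypothetical names (UploadedFile, generate_hash); it uses the local filesystem instead of private_storage and only illustrates the "reuse the upload's hash or compute one" idea, not the real model:

import hashlib
import os


class UploadedFile(object):
    """Toy stand-in for the model above (illustrative only)."""

    def __init__(self, version):
        self.version = version
        self.filename = None
        self.size = 0
        self.hash = None

    def generate_hash(self, path, block_size=2 ** 20):
        # Hash the file on disk, mirroring the "regenerate if missing" branch.
        sha = hashlib.sha256()
        with open(path, 'rb') as fobj:
            for chunk in iter(lambda: fobj.read(block_size), b''):
                sha.update(chunk)
        return 'sha256:%s' % sha.hexdigest()

    @classmethod
    def from_upload(cls, upload, version):
        ext = os.path.splitext(upload.path)[1]
        f = cls(version=version)
        f.filename = 'upload-%s%s' % (version, ext or '.zip')
        f.size = os.path.getsize(upload.path)  # Size in bytes.
        # Re-use the hash computed at upload time when present.
        f.hash = getattr(upload, 'hash', None) or f.generate_hash(upload.path)
        return f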
Example #2
    def remove_public_signed_file(self):
        """Remove the public signed file if it exists.

        Return the size of the unsigned file, to be used by the caller to
        update the size property on the current instance."""
        if public_storage.exists(self.signed_file_path):
            public_storage.delete(self.signed_file_path)
        return private_storage.size(self.file_path)
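The same pattern against the local filesystem, as a rough sketch (hypothetical paths and function name; the real code goes through the public_storage/private_storage backends): delete the signed copy if it exists, then return the size of the unsigned original so the caller can update its size field.

import os


def remove_public_signed_file(signed_file_path, unsigned_file_path):
    # Remove the public signed copy if present; report the unsigned size.
    if os.path.exists(signed_file_path):
        os.remove(signed_file_path)
    return os.path.getsize(unsigned_file_path)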
Example #3
    def _get_files(self):
        all_files, res = [], OrderedDict()

        # Not using os.path.walk so that we control the traversal order:
        # directories first, then files, both sorted alphabetically.
        def iterate(path):
            path_dirs, path_files = private_storage.listdir(path)
            for dirname in sorted(path_dirs):
                full = os.path.join(path, dirname)
                all_files.append(full)
                iterate(full)

            for filename in sorted(path_files):
                full = os.path.join(path, filename)
                all_files.append(full)

        iterate(self.dest)

        for path in all_files:
            filename = smart_unicode(os.path.basename(path), errors='replace')
            short = smart_unicode(path[len(self.dest) + 1:], errors='replace')
            mime, encoding = mimetypes.guess_type(filename)
            if not mime and filename == 'manifest.webapp':
                mime = 'application/x-web-app-manifest+json'
            if storage_is_remote():
                # S3 has no real directories, so treat this path as a
                # directory if anything is listed under its prefix.
                subdirs, subfiles = private_storage.listdir(path)
                directory = bool(subdirs or subfiles)
            else:
                directory = os.path.isdir(path)

            res[short] = {
                'binary': self._is_binary(mime, path),
                'depth': short.count(os.sep),
                'directory': directory,
                'filename': filename,
                'full': path,
                'md5': get_md5(path) if not directory else '',
                'mimetype': mime or 'application/octet-stream',
                'syntax': self.get_syntax(filename),
                'modified': (
                    time.mktime(
                        private_storage.modified_time(path).timetuple())
                    if not directory else 0),
                'short': short,
                'size': private_storage.size(path) if not directory else 0,
                'truncated': self.truncate(filename),
                'url': reverse('mkt.files.list',
                               args=[self.file.id, 'file', short]),
                'url_serve': reverse('mkt.files.redirect',
                                     args=[self.file.id, short]),
                'version': self.file.version.version,
            }

        return res
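The traversal order above ("directories first, then files, both sorted") can be reproduced without the storage abstraction. A minimal local-filesystem sketch with a hypothetical name (list_tree), keeping only a few of the metadata fields:

import mimetypes
import os
from collections import OrderedDict


def list_tree(dest):
    # Collect paths depth-first, visiting sorted directories before files.
    all_paths = []

    def iterate(path):
        entries = sorted(os.listdir(path))
        dirs = [e for e in entries if os.path.isdir(os.path.join(path, e))]
        files = [e for e in entries if e not in dirs]
        for name in dirs:
            full = os.path.join(path, name)
            all_paths.append(full)
            iterate(full)
        for name in files:
            all_paths.append(os.path.join(path, name))

    iterate(dest)

    res = OrderedDict()
    for path in all_paths:
        directory = os.path.isdir(path)
        short = os.path.relpath(path, dest)
        mime, _ = mimetypes.guess_type(path)
        res[short] = {
            'directory': directory,
            'depth': short.count(os.sep),
            'filename': os.path.basename(path),
            'mimetype': mime or 'application/octet-stream',
            'size': 0 if directory else os.path.getsize(path),
        }
    return res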
Example #4
    def handle_file_upload_operations(self, upload):
        """Copy the file attached to a FileUpload to the Extension instance.

        Return the file size."""
        upload.path = smart_path(nfd_str(upload.path))

        if private_storage.exists(self.file_path):
            # The filename should not exist. If it does, it means we are trying
            # to re-upload the same version. This should have been caught
            # before, so just raise an exception.
            raise RuntimeError(
                'Trying to upload a file to a destination that already exists:'
                ' %s' % self.file_path)

        # Copy the file from the FileUpload. This uses private_storage for
        # now, since the unreviewed, unsigned file is private.
        copy_stored_file(
            upload.path, self.file_path,
            src_storage=private_storage, dst_storage=private_storage)

        return private_storage.size(self.file_path)
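A stripped-down sketch of the same guard-then-copy flow against the local filesystem (hypothetical function name; the original goes through private_storage and copy_stored_file): refuse to overwrite an existing destination, otherwise copy and return the new file's size.

import os
import shutil


def copy_upload(upload_path, dest_path):
    # Refuse to overwrite: an existing destination means a duplicate version.
    if os.path.exists(dest_path):
        raise RuntimeError(
            'Trying to upload a file to a destination that already exists: '
            '%s' % dest_path)
    shutil.copyfile(upload_path, dest_path)
    return os.path.getsize(dest_path)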