Example #1
    def check_availability(self):
        """
        Perform check against Default Storage.
        """
        try:
            name = default_storage.get_valid_name('Informer Storage')

            # Save data.
            content = ContentFile('File used by StorageInformer checking.')
            path = default_storage.save(name, content)

            # Check properties.
            default_storage.size(path)
            default_storage.url(path)
            default_storage.path(path)

            default_storage.get_accessed_time(path)
            default_storage.get_available_name(path)
            default_storage.get_created_time(path)
            default_storage.get_modified_time(path)
            default_storage.get_valid_name(path)

            # And remove file.
            default_storage.delete(path)

            storage = default_storage.__class__.__name__
        except Exception as error:
            raise InformerException(
                f'An error occurred when trying to use your Storage: {error}')
        else:
            return True, f'Your {storage} is operational.'
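A minimal variant of the probe above, sketched against an explicitly chosen backend rather than default_storage; the probe_storage helper and its use of get_storage_class are assumptions for illustration, not part of the original code.

from django.core.files.base import ContentFile
from django.core.files.storage import get_storage_class


def probe_storage(dotted_path='django.core.files.storage.FileSystemStorage'):
    # Instantiate the backend class directly instead of relying on default_storage.
    storage = get_storage_class(dotted_path)()
    name = storage.get_valid_name('probe.txt')
    path = storage.save(name, ContentFile(b'probe'))
    try:
        # size() returns the stored object's size in bytes.
        return storage.size(path)
    finally:
        storage.delete(path)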
Example #2
def _check_storage():
    filename = 'django-watchman-{}.txt'.format(uuid.uuid4())
    content = 'django-watchman test file'
    path = default_storage.save(filename, ContentFile(content))
    default_storage.size(path)
    default_storage.open(path).read()
    default_storage.delete(path)
    return {"ok": True}
Example #3
def raw_file_view(request, filename):
    if not filename or filename.startswith('/'):
        raise Http404
    if not request.user.is_superuser:
        raise PermissionDenied
    if not default_storage.exists(filename):
        raise Http404

    size = default_storage.size(filename)
    file = default_storage.open(filename, 'rb')
    content_type = mimetypes.guess_type(filename)[0] or \
        'application/octet-stream'
    response = HttpResponse(FileWrapper(file), content_type=content_type)
    response['Content-Length'] = size
    return response
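For comparison, a hedged sketch of the same view using Django's FileResponse, which streams the file; the view name raw_file_streaming_view is invented here, and the explicit Content-Length from default_storage.size() is kept only to mirror the example above.

import mimetypes

from django.core.exceptions import PermissionDenied
from django.core.files.storage import default_storage
from django.http import FileResponse, Http404


def raw_file_streaming_view(request, filename):
    if not filename or filename.startswith('/'):
        raise Http404
    if not request.user.is_superuser:
        raise PermissionDenied
    if not default_storage.exists(filename):
        raise Http404

    content_type = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
    # FileResponse wraps and streams the open file object for us.
    response = FileResponse(default_storage.open(filename, 'rb'),
                            content_type=content_type)
    response['Content-Length'] = default_storage.size(filename)
    return response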
Example #4
def andro_cfg(sha256, force=False):
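    # 3 * 10485760 bytes is 30 MiB: larger APK samples are skipped.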
    if default_storage.size(sha256) > 3*10485760:
        return

    try:
        result = es.get(index=settings.ELASTICSEARCH_APK_INDEX, id=sha256)['_source']
        if result.get('andro_cfg') is not None and not force:
            return
    except Exception:
        return

    with NamedTemporaryFile() as f:
        f.write(default_storage.open(sha256).read())
        f.seek(0)
        with TemporaryDirectory() as output_dir:
            try:
                cfg = CFG(f.name, output_dir, 'raw')
                cfg.compute_rules()
                report = cfg.generate_json_report()
                es.update(index=settings.ELASTICSEARCH_APK_INDEX, id=sha256, body={'doc': {'andro_cfg': report}},
                          retry_on_conflict=5)
                output_path = get_andro_cfg_storage_path(sha256)
                files_to_upload = glob.glob(f'{output_dir}/**/*.bmp', recursive=True)
                files_to_upload.extend(glob.glob(f'{output_dir}/**/*.png', recursive=True))
                files_to_upload.extend(glob.glob(f'{output_dir}/**/*.raw', recursive=True))
                for img in files_to_upload:
                    img_path = img.replace(output_dir, '')
                    print(f'{output_path}{img_path}')
                    # Open each image in a context manager so the handle is closed after upload.
                    with open(img, mode='rb') as img_file:
                        default_storage.save(f'{output_path}{img_path}', File(img_file))
            except Exception as e:
                logging.error(e)
Example #5
def _export_distributed_query_results(distributed_query, extension):
    if extension == ".csv":
        content_type = "text/csv"
        exporter = _export_dqr_to_tmp_csv_file
    elif extension == ".ndjson":
        content_type = "application/x-ndjson"
        exporter = _export_dqr_to_tmp_ndjson_file
    elif extension == ".xlsx":
        content_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
        exporter = _export_dqr_to_tmp_xlsx_file
    else:
        raise ValueError(
            f"Unsupported distributed query results export extension: {extension}"
        )

    tmp_filepath = exporter(distributed_query)
    filename, filepath = _dqr_export_filename_filepath(distributed_query,
                                                       extension)

    with open(tmp_filepath, "rb") as tmp_f:
        default_storage.save(filepath, tmp_f)
    os.unlink(tmp_filepath)

    return {
        "filepath": filepath,
        "headers": {
            "Content-Type": content_type,
            "Content-Length": default_storage.size(filepath),
            "Content-Disposition": f'attachment; filename="{filename}"',
        }
    }
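A hypothetical sketch of consuming the returned dict in a download view; the serve_export view and the way the export dict reaches it are assumptions for illustration only.

from django.core.files.storage import default_storage
from django.http import HttpResponse


def serve_export(request, export):
    # 'export' is assumed to be the dict returned by
    # _export_distributed_query_results() above.
    with default_storage.open(export["filepath"], "rb") as f:
        response = HttpResponse(f.read())
    # Propagate Content-Type, Content-Length and Content-Disposition as-is.
    for header, value in export["headers"].items():
        response[header] = value
    return response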
Example #6
def download_file(filename,
                  download_url=None,
                  contentnode=None,
                  assessment_item=None,
                  preset=None,
                  file_size=None,
                  lang_id=None):
    checksum, extension = os.path.splitext(filename)
    extension = extension.lstrip('.')
    filepath = models.generate_object_storage_name(checksum, filename)

    # Download file if it hasn't already been downloaded
    if download_url and not default_storage.exists(filepath):
        buffer = StringIO()
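        # The remote content storage shards files by the first two characters of the filename.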
        response = requests.get('{}/content/storage/{}/{}/{}'.format(
            download_url, filename[0], filename[1], filename))
        for chunk in response:
            buffer.write(chunk)

        checksum, _, filepath = write_raw_content_to_storage(buffer.getvalue(),
                                                             ext=extension)
        buffer.close()

    # Save values to new file object
    file_obj = models.File(
        file_format_id=extension,
        file_size=file_size or default_storage.size(filepath),
        contentnode=contentnode,
        assessment_item=assessment_item,
        language_id=lang_id,
        preset_id=preset or "",
    )
    file_obj.file_on_disk.name = filepath
    file_obj.save()
Example #7
def get_cached_minifest(app_or_langpack, force=False):
    """
    Create a "mini" manifest for a packaged app or langpack and cache it (Call
    with `force=True` to bypass existing cache).

    Note that platform expects name/developer/locales to match the data from
    the real manifest in the package, so it needs to be read from the zip file.

    Returns a tuple with the minifest contents and the corresponding etag.
    """
    cache_prefix = 1  # Change this if you are modifying what enters the cache.
    cache_key = "{0}:{1}:{2}:manifest".format(cache_prefix, app_or_langpack._meta.model_name, app_or_langpack.pk)

    if not force:
        cached_data = cache.get(cache_key)
        if cached_data:
            return cached_data

    sign_if_packaged = getattr(app_or_langpack, "sign_if_packaged", None)
    if sign_if_packaged is None:
        # Langpacks are already signed when we generate the manifest and have
        # a file_path attribute.
        signed_file_path = app_or_langpack.file_path
    else:
        # sign_if_packaged() will return the signed path. But to call it, we
        # need a current version. If we don't have one, return an empty
        # manifest, bypassing caching so that when a version does become
        # available it can get picked up correctly.
        if not app_or_langpack.current_version:
            return "{}"
        signed_file_path = sign_if_packaged()

    manifest = app_or_langpack.get_manifest_json()
    package_path = app_or_langpack.get_package_path()

    data = {"size": storage.size(signed_file_path), "package_path": package_path}
    if hasattr(app_or_langpack, "current_version"):
        data["version"] = app_or_langpack.current_version.version
        data["release_notes"] = app_or_langpack.current_version.releasenotes
        file_hash = app_or_langpack.current_version.all_files[0].hash
    else:
        # LangPacks have no version model, the version number is an attribute
        # and they don't have release notes.
        data["version"] = app_or_langpack.version
        # File hash is not stored for langpacks, but file_version changes with
        # every new upload so we can use that instead.
        file_hash = unicode(app_or_langpack.file_version)

    for key in ["developer", "icons", "locales", "name"]:
        if key in manifest:
            data[key] = manifest[key]

    data = json.dumps(data, cls=JSONEncoder)
    etag = hashlib.sha256()
    etag.update(data)
    if file_hash:
        etag.update(file_hash)
    rval = (data, etag.hexdigest())
    cache.set(cache_key, rval, None)
    return rval
Example #8
    def create_blocklisted_version(self):
        """
        Creates a new version whose file is the blocklisted app found in /media
        and sets status to STATUS_BLOCKLISTED.

        """
        blocklisted_path = os.path.join(settings.MEDIA_ROOT, 'packaged-apps',
                                        'blocklisted.zip')
        last_version = self.current_version.version
        v = Version.objects.create(
            addon=self, version='blocklisted-%s' % last_version)
        f = File(version=v, status=amo.STATUS_BLOCKED,
                 platform=Platform.objects.get(id=amo.PLATFORM_ALL.id))
        f.filename = f.generate_filename()
        copy_stored_file(blocklisted_path, f.file_path)
        log.info(u'[Webapp:%s] Copied blocklisted app from %s to %s' % (
            self.id, blocklisted_path, f.file_path))
        f.size = storage.size(f.file_path)
        f.hash = f.generate_hash(f.file_path)
        f.save()
        f.inject_ids()
        self.sign_if_packaged(v.pk)
        self.status = amo.STATUS_BLOCKED
        self._current_version = v
        self.save()
Example #9
def _check_storage():
    try:
        filename = 'django-watchman-{}.txt'.format(uuid.uuid4())
        content = 'django-watchman test file'
        path = default_storage.save(filename, ContentFile(content))
        default_storage.size(path)
        default_storage.open(path).read()
        default_storage.delete(path)
        response = {"ok": True}
    except Exception as e:
        response = {
            "ok": False,
            "error": str(e),
            "stacktrace": traceback.format_exc(),
        }
    return response
Example #10
    def _filesize(self):
        if self._filesize_stored is not None:
            return self._filesize_stored
        if self.exists():
            self._filesize_stored = default_storage.size(self.path)
            return self._filesize_stored
        return None
Example #11
    def manifest_updated(self, manifest, upload):
        """The manifest has updated, update the version and file.

        This is intended to be used for hosted apps only, which have only a
        single version and a single file.
        """
        data = parse_addon(upload, self)
        version = self.versions.latest()
        version.update(version=data['version'])
        path = smart_path(nfd_str(upload.path))
        file = version.files.latest()
        file.filename = file.generate_filename(extension='.webapp')
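        # The size is stored here in kilobytes (rounded), with a floor of 1.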
        file.size = int(max(1, round(storage.size(path) / 1024, 0)))
        file.hash = (file.generate_hash(path) if
                     waffle.switch_is_active('file-hash-paranoia') else
                     upload.hash)
        log.info('Updated file hash to %s' % file.hash)
        file.save()

        # Move the uploaded file from the temp location.
        copy_stored_file(path, os.path.join(version.path_prefix,
                                            nfd_str(file.filename)))
        log.info('[Webapp:%s] Copied updated manifest to %s' % (
            self, version.path_prefix))

        amo.log(amo.LOG.MANIFEST_UPDATED, self)
Example #12
    def exists(self):
        """ Return whether the file exists and is valid """
        file_exists = self.id and self.image and self.image.path and default_storage.exists(self.image.name)
        # A file smaller than 64 bytes is necessarily corrupt: delete it.
        if file_exists and default_storage.size(self.image.name) < 64:
            default_storage.delete(self.image.name)
            file_exists = False
        return file_exists
Example #13
def expert_recording(request,phrase_id):
    try:
        phrase = Phrase.objects.get(id=int(phrase_id))
    except Phrase.DoesNotExist:
        raise Exception('phrase with id does not exist: ' + str(phrase_id))

    upload_params = "phrase_id=" + str(phrase.id) + "&type=expert"
    upload_params_encoded = urllib.quote_plus(upload_params)


    # Expert info
    filename = "expert" + str(phrase.id) + ".mp3"
    exists = default_storage.exists(filename)
    created_time = None
    size = None
    if exists:
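        # created_time() was deprecated in Django 1.10 in favour of get_created_time() and removed in Django 2.0.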
        created_time = default_storage.created_time(filename)
        size = default_storage.size(filename)

    return render(request, 'expert_recording.html', {
        'phrase': phrase,
        'upload_params_encoded': upload_params_encoded,
        'expert_url': lib.expert_url(phrase_id),

        'filename': filename,
        'exists': exists,
        'created_time': created_time,
        'size': size,
    })
Example #14
    def run_test(self, filename, content='Lorem ipsum dolar sit amet'):
        content = UnicodeContentFile(content)
        filename = default_storage.save(filename, content)
        self.assert_(default_storage.exists(filename))

        self.assertEqual(default_storage.size(filename), content.size)
        now = datetime.utcnow()
        delta = timedelta(minutes=5)
        mtime = default_storage.getmtime(filename)
        self.assert_(mtime > mktime((now - delta).timetuple()))
        self.assert_(mtime < mktime((now + delta).timetuple()))
        file = default_storage.open(filename)
        self.assertEqual(file.size, content.size)
        fileurl = force_unicode(file).replace('\\', '/')
        fileurl = urlquote_plus(fileurl, '/')
        if fileurl.startswith('/'):
            fileurl = fileurl[1:]
        self.assertEqual(
            MEDIA_URL+fileurl,
            default_storage.url(filename)
        )
        file.close()

        default_storage.delete(filename)
        self.assert_(not default_storage.exists(filename))
Example #15
def convert(directory, delete=False):
    print 'Converting icons in %s' % directory

    pks = []
    k = 0
    for path, names, filenames in walk_storage(directory):
        for filename in filenames:
            old = os.path.join(path, filename)
            pre, ext = os.path.splitext(old)
            if (pre[-3:] in size_suffixes or ext not in extensions):
                continue

            if not storage.size(old):
                print 'Icon %s is empty, ignoring.' % old
                continue

            for size, size_suffix in zip(sizes, size_suffixes):
                new = '%s%s%s' % (pre, size_suffix, '.png')
                if os.path.exists(new):
                    continue
                resize_image(old, new, (size, size), remove_src=False)

            if ext != '.png':
                pks.append(os.path.basename(pre))

            if delete:
                storage.delete(old)

            k += 1
            if not k % 1000:
                print "... converted %s" % k

    for chunk in chunked(pks, 100):
        Addon.objects.filter(pk__in=chunk).update(icon_type='image/png')
Example #16
def thumb_size(filepath):
    """Determine if a thumbnail file size in storage"""
    if storage.exists(filepath):
        return storage.size(filepath)
    elif os.path.exists(filepath):
        return os.path.getsize(filepath)
    return 0
Example #17
    def __init__(self, filename, file_path, size):
        self.filename = filename
        self.file_path = file_path

        # Note: the size argument is unused; the actual size is read from storage.
        self.size = default_storage.size(self.file_path)
Example #18
    def from_upload(cls,
                    upload,
                    version,
                    platform,
                    is_beta=False,
                    parse_data={}):
        addon = version.addon

        file_ = cls(version=version, platform=platform)
        upload.path = smart_path(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload.path)
        data = cls.get_jetpack_metadata(upload.path)
        if 'sdkVersion' in data and data['sdkVersion']:
            file_.jetpack_version = data['sdkVersion'][:10]
        file_.no_restart = parse_data.get('no_restart', False)
        file_.strict_compatibility = parse_data.get('strict_compatibility',
                                                    False)
        file_.is_multi_package = parse_data.get('is_multi_package', False)
        file_.is_experiment = parse_data.get('is_experiment', False)
        file_.is_webextension = parse_data.get('is_webextension', False)

        if is_beta and addon.status == amo.STATUS_PUBLIC:
            file_.status = amo.STATUS_BETA

        file_.hash = file_.generate_hash(upload.path)
        file_.original_hash = file_.hash

        if upload.validation:
            validation = json.loads(upload.validation)
            if validation['metadata'].get('requires_chrome'):
                file_.requires_chrome = True

        file_.save()

        log.debug('New file: %r from %r' % (file_, upload))
        # Move the uploaded file from the temp location.
        destinations = [version.path_prefix]
        if file_.status in amo.MIRROR_STATUSES:
            destinations.append(version.mirror_path_prefix)
        for dest in destinations:
            copy_stored_file(upload.path,
                             os.path.join(dest, nfd_str(file_.filename)))

        if upload.validation:
            # Import loop.
            from olympia.devhub.tasks import annotate_validation_results
            from olympia.devhub.utils import ValidationAnnotator

            validation = annotate_validation_results(validation)
            FileValidation.from_json(file_, validation)

            # Copy annotations from any previously approved file.
            ValidationAnnotator(file_).update_annotations()

        return file_
Example #19
    def manifest_updated(self, manifest, upload):
        """The manifest has updated, update the version and file.

        This is intended to be used for hosted apps only, which have only a
        single version and a single file.
        """
        data = parse_addon(upload, self)
        version = self.versions.latest()
        version.update(version=data['version'])
        path = smart_path(nfd_str(upload.path))
        file = version.files.latest()
        file.filename = file.generate_filename(extension='.webapp')
        file.size = storage.size(path)
        file.hash = (file.generate_hash(path)
                     if waffle.switch_is_active('file-hash-paranoia') else
                     upload.hash)
        log.info('Updated file hash to %s' % file.hash)
        file.save()

        # Move the uploaded file from the temp location.
        copy_stored_file(
            path, os.path.join(version.path_prefix, nfd_str(file.filename)))
        log.info('[Webapp:%s] Copied updated manifest to %s' %
                 (self, version.path_prefix))

        amo.log(amo.LOG.MANIFEST_UPDATED, self)
Example #20
    def __init__(self, path, sort='size', reverse=False):
        self.path = path
        subdirectories, files = default_storage.listdir(path)

        self.subdirectories = []
        for subdirectory in subdirectories:
            if subdirectory.startswith('.'):
                continue
            subsubdirectory, subfiles = default_storage.listdir(
                os.path.join(self.path, subdirectory))
            item_count = len(subsubdirectory) + len(subfiles)
            self.subdirectories.append((subdirectory, item_count))

        self.files = []
        for file in files:
            if file.startswith('.'):
                continue
            path_to_file = os.path.join(self.path, file)
            size = default_storage.size(path_to_file)
            self.files.append((file, size))

        if sort == 'name':
            self.subdirectories.sort(
                key=lambda subdirectory: subdirectory[0].lower(), reverse=reverse)

            self.files.sort(key=lambda file: file[0].lower(), reverse=reverse)
        else:
            self.subdirectories.sort(
                key=lambda subdirectory: subdirectory[1], reverse=reverse)

            self.files.sort(key=lambda file: file[1], reverse=reverse)
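A hypothetical usage sketch; the class name DirectoryListing is invented here, since the excerpt above only shows its __init__ method.

# Assuming the __init__ above belongs to a class along the lines of:
#   class DirectoryListing:
#       def __init__(self, path, sort='size', reverse=False): ...
listing = DirectoryListing('uploads', sort='size', reverse=True)
for name, size in listing.files:
    print(f'{name}: {size} bytes')
for name, item_count in listing.subdirectories:
    print(f'{name}/ ({item_count} items)')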
Example #21
    def from_upload(cls, upload, version, parsed_data=None):
        """
        Create a File instance from a FileUpload, a Version and the parsed_data
        generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results."""
        assert parsed_data is not None

        file_ = cls(version=version)
        upload_path = force_str(nfd_str(upload.path))
        ext = force_str(os.path.splitext(upload_path)[1])
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload_path)
        file_.is_restart_required = parsed_data.get('is_restart_required', False)
        file_.strict_compatibility = parsed_data.get('strict_compatibility', False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False
        )

        file_.hash = file_.generate_hash(upload_path)
        file_.original_hash = file_.hash
        file_.manifest_version = parsed_data.get(
            'manifest_version', DEFAULT_MANIFEST_VERSION
        )
        file_.save()

        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            optional_permissions = list(parsed_data.get('optional_permissions', []))

            # devtools_page isn't in permissions block but treated as one
            # if a custom devtools page is added by an addon
            if 'devtools_page' in parsed_data:
                permissions.append('devtools')

            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions or optional_permissions:
                WebextPermission.objects.create(
                    permissions=permissions,
                    optional_permissions=optional_permissions,
                    file=file_,
                )

        log.info(f'New file: {file_!r} from {upload!r}')

        # Move the uploaded file from the temp location.
        copy_stored_file(upload_path, file_.current_file_path)

        if upload.validation:
            validation = json.loads(upload.validation)
            FileValidation.from_json(file_, validation)

        return file_
Example #22
def package_minifest(request):
    """Serves the mini manifest ("minifest") for the packaged `.zip`."""
    package_path, package_etag = get_package_info()

    manifest_content = json.dumps({
        'description': 'Firefox Marketplace',
        'developer': {
            'name': 'Mozilla',
            'url': 'http://mozilla.org',
        },
        'icons': {
            '64': 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADwAAAA8CAYAAAA6/NlyAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA2hpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDowNDgwMTE3NDA3MjA2ODExODIyQUI1QzMyMDQyNjY5NSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo2MDE5RjlGQTIyQjgxMUUyQkUyM0JCNEZBMkI4QTY1RSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo2MDE5RjlGOTIyQjgxMUUyQkUyM0JCNEZBMkI4QTY1RSIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M2IChNYWNpbnRvc2gpIj4gPHhtcE1NOkRlcml2ZWRGcm9tIHN0UmVmOmluc3RhbmNlSUQ9InhtcC5paWQ6RkI3RjExNzQwNzIwNjgxMTgyMkFBQUUyODlFQjEzQUMiIHN0UmVmOmRvY3VtZW50SUQ9InhtcC5kaWQ6MDQ4MDExNzQwNzIwNjgxMTgyMkFCNUMzMjA0MjY2OTUiLz4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz5kNw/pAAAWz0lEQVR42uRbe5Bf1V3/nnPu/f32kc3mBaEbSHaTJaUJJIBxKLYQKEIrThWswAAFO+UxCgp2+IOpY8fRGYs6tRXGBx2RqtMpAh1DEYujghBEHpJAQiAPFpJssiTZ7GY3m939Pe695/h9nd/vtwlQaBPrjL/Mzb2/+zv3nu/j832esyaEAMYYOCGfL/7AgMV3Gwdg8TvNY+QWXoDHuflD5+ABb+AZv//958OJIId5Pa4MX/0wcmPvAJvciec+5sxaPixee2LcKONNKoRJX8i1V8Z9GMB7fwrfu/L+48mwPa4ibGu7F5n9M7BuBrP4HTwekOB1gmd31EFj8WzjM4YO6MfzX8EtT3zjeJJ4XDS88rtbPjk9Xb9n5zPvXISUI9EJn1izzgqzThnhM6sZWLVFEO2iZg1qNvA1HoVqGj+n/3zv8+Vy8rUt1678j58qpD/+N5tXW2e+FRJz8d63R2Fy24gwy5oiJgnhFsysEiw7cw4kbQmas0G+DfMsphsgqxSw4/VxgKkMLDLridEix4OgnsO8s06Cnt65EIrwUlHAndt+7cyXflyGkx/nwcX3b5qFhP9BDuaOhNkCmByeOho74phQq/3I7NzFHThQmbVHmTAy6FHDAy+PgmfN+xmvOnRgmhnGz3l4vLD8wc0P4PC7B25eNfZRaf/INrzoLzadBcG8YgN8BUkl4IKvIYEjk+JpG5yYBkdZxZN2GMLEnCkCD5UjMHJpDP1uidkgYxHj8o6RI5BnPtqgQVnegpeber+9+fwTasM99752o0nMt501beRrUnK6qLF9e8dgYvMQ6jpRSFvRuzosgwNPXtoFJYS0j2FJTRgFB9VqDgd2osCIKYIye2oviubvOcw/51RY2DMbTTtAjlDPyd6LkCPE7xq6ffV9x92GF3xz493ogP7IJQYSdqqGHW6eFbDr2QG6UIZdix3P9NSNe6R9njI0Y2+IzkscGBRGIFBkcq+UQv/Fy9DnWbF78mvs25jxP86L8NXhr5wTjgvDJ31jw9eDtV+lqEKMoqNiZ1ut1GD/hj3obCpNR4XUz4YcysynZbt1fLY8xON/0Y6iATiEeA6SfBTIbIZMkJUcRosJ9E7iDLUMXbPgtDWnofMriSzoOfZvaAre31/z5raxu96f6fdleP7Zd8/3zt3gE/SqfX1ri7ndV1icuEAcG+Q0R6IqY9MAo5MSZpCh36s8C7fBI9CWFBC9UjUngnB8YSXKtLiMwLZKEQq9OFJPOQmJy1kPJTzKidBUzRz8dbgCfrfzUmGc2Dm5Czq720SgeM+zQFDT44f/FQZ2PukQbcYXfzu28Z7xD+WluyYGbzk8Z8k9Bqn0O3dDsctBhvjNbQq5SzXOEkTx2hu4oPI2fGb8UdgOFG4SYQLoEB1SLBaVeuZKki25J+mlkRBEcEW4B+8E7U5QfWFYB2vzXnh2Vp/E572jMLUHoY7GbEKGTOeQIAJsXlyWen8ZahHmjW5tRxd+z4fy0vUi/yVL3pSSASTaoVu0CB+Ls9PLJe8laROVBdwy8U+aKxNsE1a6Qw/LckkpDAV0cHjgdQkFlZL9W0m8SuwLHP/mcAyZDD1DskyQaUeODd/7G+Pr0Kmp0EIujOP8NvdMl8XvjoIa0+VhKq9e+V68HaPh/v4vzx6pHfpZxyHGkNTY4xZke8Qgwt/naHEJPZrDOdm7sCS8AylplLTH8BTHJlr08E66AjZ0rMJ3JDE0tEQvee+a6TehP3tdMIHvIE2T36KxFqddAoNwPiLphVKP2nTBwkdDgwRpJGYtKShI2JuqV885o++62dt2fm/iAxlGs7siBJ+Q1yTJkY5JagkShnzyExiWOFoEvF5b24k2JzBlZ2ZiUWT5vNt9HH51we0Sf0AHQqvPCOKhOz4Nj438JSz1W/GrjDNcTyANFjj7urDYCS8UC5lZ43NhMohGCY2WiSoYlfgvyX1xNU7wwAdCumrNteQA6GGCCUvNNyFDGk882g1q16AE1uQ72DkTJBmm+J+jgyCJzmd70i8elnPmXGNJLuGmyBvaot/eLC2X2sLIXHRN70vQthOU5Jr6GzzO4bMpwVnsFqEv18S4jRkN0jvt3I0faMMLl92S5nl2SbNME+nZKNEIHWY6h54wCaeHt9gm2WM6y8QSbAjSCX6/oHgR5tWmQLMFPR97PbtWhbXFy5AiRSlympA/sMp44vldfW4QluQTaNc4f5Y1mUXBEQqtKoY1jc9NZ9M/t3LJdaX3hX
Q+/+RrzP63U36QoFSIRDwmFFYTH5dTPR8Y3tfYrUicOBWKtdbkDGXOM0Ai1kI7Dv+Z3wM764vRXhMtGpopKI/H772wB8puguFO4YlgbPE/Tw6cIGs8h8Tr/Btwr1/FjJJSSLMNZn3RyNS48qplbry/99dhN9z33gwn6d2pVOOsXSI8Mg3qcDznuw7OMofhKru+4ZGNapSs0zmjJW3g37tgGla5bWyXtrX+D820myvBgoqNABKojMxP5oWMF+RN8IcvmGfhKb8ItocutVmxY+ubaSkjkuksYLrwd+Lr7jsW0neuL7ePj65g1QWpXphpSWnY5ix/R4/qh+Hr7nHodFUJP0byamIuSamQD6ylxIoTI1skJPAYzT7Jzm1LSKff0hKweaRW01cTewT4PXEM8Q6XwbeSdfCpsB99CMFZa2d1WFbNUEInCung3j6467+6j9GwzWqfX9VZsb59NvKL0itJOojFgoQXFPdp9gicl70FZxeb2EFRZcNpo6acVMawjRtJPTnTZJUL7EkLoQFlr/bShDiVks5TruxVu5IrW6SBeDIY9yiNnJOMw5/YR2Bjx7kYppbDkO9A3VjhE+FCPEPXJzDicLprnqpXrsV575/JsPdXfWnPA1DCOyX8Tzxkit+ReIy5CWuRtOL4N2GWxihzCmXt1rCAjBVfELMuowIwLdkuCdQTk1o5BSuCIJopLGF6J7FZTYQJyaW2ONdvhNWV1xiAGSKwwGpLiooC6ujUMhROhjde8NdcNTWD4Vv/DeO1v0iqOqO2l0iFR54XxGFY5NS1aJaZ5ZgbODaTIDgG0xh0YKRVYyUeN8IwfuHSmOYw0iggRAQ2I81AC8nCyHEBwV4LKG+1puQyrWAhOU9xmpChjo4moYwQpR4wfJLTqwVzPtz2NHnreqJZdR+l5DYyqJ7WISZpTiKctRsdlLampD1FCbkyThMm4nWNVkdSGRrWKvH5/JYCth7IedrlCxJYe7ZrJCM0NkCISAdN1LBoUWfjxS+QcCLTOaGMzAMT74Kuqdoi08AJqUtqHZcp7cj92fiGlzWDL86gQkG05cWpcANOG4rcx3HiqHkC27BZdkyxb6dx02mDku/bpiC2D/oGs/TZMZLDlgHfRAGPM3w4pz0EfRc7OEUXx31CCDkyI40RQZyTuciPUCqs8zMUirC66aW9X8IQo0FGG2+sNa8Ei5GS0xFmNeQYgWVk1katspCEcJqYyKvWCnjp7RxisyMeGwZzqNSDzGOoXlVB03NOTNZaPTtBIDOkyqDCQ3xGo9vL9LAAiR5UAYVs9N7LWhlenHNVEjiWNtrDDO1EiniFsnXChEuksLfadbWc/jUbHDJhwqGEtPvcazmXgi6GGz3o3vqNdTCYYplSyuM5hlKyQyal3SKjiUxkmis4GhcbK9r+cyZpZGmGsz8JURhmWxkO88greFfWF4izsdBCvG12aLiCCSJNLtytb/SpnAoEtHiAcgkm6wBDhwOjxqjPjgf923ckwHhNPDZQ0yFN8W7OjJnItFVza1m5oXhveE6r/Xzf6Io60S1LKPBKhl/QquF2SnWqSbcQxFD1ymSEuMCnMaFVBlkAKpxYKSHRDDOyeXQaGzbXYjiWhATfnWrWpWiEDRuxmEA/asuBnwuJncE0Z2lBHKLU2lbnk44NURDpJGRy0xQFUjPdsU82qxmWfNFJYslsO34Za9gXGYOEFIGziSGI4GQFZ0YzKf49EkZ+sYQOBANpHhwMjRYscc6H8bUXre3k6uepp6cbfa19YwXUUOjl1LKmKAQRaKkiY9NSJ0m1Pzkp1iKtalDtbIyakEQRr/Cn5KuedMb1qo4WDWNkRClUbZtGCMkM7FEQiutgAnllTHvHEn89PxMYf5jPtqcwsDPjeMRawXG9PQ5OPT2BU5Y6WLo4EWTouwd25KwCn4jjMngOrLWC7ZXhyz0wcaBcB7PphYbpRTuUXABRazu0NPXQyvAU5cq1tFOIt02nFWt1Sw081l7z5Y4dnNVCQWYzHI/kzZQyvru34Hq2pMfqT7eh9iltC7D6ghJWSM3fhgYxmbCCu0D2Se0hcmaNRrYIn5IgPltpAHJC09INFUqE9mrSrhrOj7RCmtdJpjE+25ZmhCRCnm3HNOwl/iapDcVHHmeDahe1liD0UECU/k1MFJQs8XNUQGx9pQb1ilRkaZuD9tRjGijvnJq2nDURcgIZed3zez2nXRm7OeKPvbsSaLUzwq0dTVuNenXygTXSsOcqaqqFYX+QThWCdGiJk/GFlq2iAedGh8ZKRsXeNVgt64AhTZo5OFRnItMIN8zzDu4N+riVjAlVVbKxFx/gwJ4cenoTKQS4wJB0kkIUFwVavgotXjOywMk1x+jMN7rennlq15XIbH+T4SLsphkqSblFvVrZtAiAF760GBCP6GUZyNumy9UCn+7vHigQskECkOaGsT0ruVKzEcAM4t09OzL42BLLsIXUa/sW2JZFKkWjjmYX4rSvTT/n2gbm0kqGTNoyowMJGmjacOH3UFiqmDbOZRtdRXSJPpY2sQBo9OBUu/ICHJuJA0EGKSS8+lwNjhyiPlfCGqTMvQ2vyyhIWpXgZjuey9R4x3sl59m0p0YCvPZMlevxmOE1EEXmZVyj0c+/BXGGrbsniO74SDWUBP+539mi4WILnWpEVmjZhgAzm4vU2mn9Hh2ceOlUOow41esvZDBx0Mhim5oBhzKNJ8Y1Fxophsu6OLWCaVXRwpEDAFuer8HKT6bslcFkXCtLH7xpq62kGF1wtOqNudzEG1OuLS5C/XdTw/9y8z4MWu9UbCL9u/iyYBsvYli0rBiAOjQ2w+jJ8PqNlzI4MmKZWdJcGzJRdtSAl+ZcmZrxaJe0lEJHCb+nqeP428YaD6z1IwcMbH8xExNqIEm7ul4984xWkRd643dap8LztCGG/RgyvXVmTyvP/j3Ns1vlQVruMNLhD4k8j5N6Lzl1I5qBwpw8JBJZYH1WGbPQ4aSujg0Bk3gpNZ3I2LlGkOFcWvazGF678rS8g/PkeNSHDWT1giKYFI2mpQ/WquHo4NTvRN9ACilnVdLw06hUvHlTC8M+f2heOnFr+yyUeruDBGNIgmopYUJPiwykBa5eEqP7Uhz3jCVmS2ymriQZRoR6TAQanUwjaIgpKMut0PqWlYbXOImh3pT6QjcL83sUmM0oJKWMPoMQJcdEHV5aZaAgYrC0zzPqUyc8ljoepprCXDOByqw/dExPy+XZ+rkwOe2S0CEVDpaCvMYjpRhhlL5ThcR8JZI8m8RqKOJ+amN5pbUNy0lJTGC40WdZG7y2ZiVc5dBcxSTPGwqFaLtm4Eb7xNSRpD4bVv42l5aQ5feIs/Q2l/UwKl9RACeHcXQA+RPHdC2L9Xf5Uzunt3PTuxQgxYMbeSTdMmq1TIk9EtAG/J1qXXKzBu+bdsNjbWqOaoVqCDLa3wnRBiUskcPyHhpt2RgCSesm2iOltW1GO5xIU2q1lHRMhylxe1SaBuVE62EyoRwRGGBhR2U3PH1H7T370rbUcVOowEZnCoZXYur4khLDmF5CXpoYM85pd0Idi
vNswxxGona9Zi4KV9CaOS5qkDZDFErw0XVwWAw2riuBvpfKTM8LX0ZDMaGM+30UYg3ig3J3ytlLiBz87mmR3BFK099536WW63/43Kv7Ovr2cugjKKc0GULEoW2keE5ju6ZgzdqS4XG0rmlIECUvYQakPAtGmPWqWe+bOx3iqiv3ujhLs+L8gt7TvS0cdqMJEbqohMR56AyukNSW7lGhb70WMMCF/3Dac+gLT7z46AcupuXp/Mtz8rC8Xu+5JqUXWWTcEaRIq9RCoVYiHSXpN4F+l7DmdTuSrgLqVqSgyzfcSpTlXWbcaAbnY/88SCOMowAKV44gmpYGFldQkoMQbQUrx7k60oxH4MwKC4eTbv6RC+JX/vCV1/eWTn/GEWxoDRYZTkKdHQG9zJB2EeokAMKTo/Wk1CvkFdCcTAh8C2R69goHy27AjGsRmkIXjpuNNONhsSSn5d7e6x10r3TcefTUSOdFA1laIYRJI8ELypzgmOYkOkyoyZoWhlALctBq5xCctu1zj7+27kcuiNNnVd/wJft3z5o+JUyWDWe4tHqI8YzStED+NGHvSL2jQPZLjdpEF4iCwpm17GDeSg+9l0mXvf9qXY/Ji5acXTZ3nHZpYG2PbjINs2fBEYNoUpxoZYGveXNMJpUUu+qcFs/qMZGHSV/OczvnPIDBD7cxbdGfj/vxdOnlNUJoXWIdb39AZnmpElM9BpyN4UfaELTyz7sGFZbEbN8vxITBwMhGD7seL2QhCane9c8AB17R7QsY3Hs/62HuWSJiCsJs+5zo6HqRUSdghBba32HZu6lDyAs2meGk/9bPrNs88ZF24l3yj5uffhtW3Ef7KrjYYIiT96sj3DDlK0iieqB0Q1HBl1VgziIP3XicsgY1erkXDTgpWg/tKKByUBe/qDgfKWB8W9yflTHTSz+Xw8Jz69B1qoeuHnGcgFIPvoJjqo2DPXOBdGS0uSVnnknub9X7H7vw4Te+8358feBey0vXvfnbz/7y4k+sKAYvI8ZDqrUnrTAiIyGzsprLBbokDKuvn8TvJW2Z6FpoqLETyqsGqsPUaZhmUVeHS5AuCBqDNEdEinovjTvw6o3dP0ZrX9k1EGRBvS5MEm0IOthV6dl84fcHrvyJ9lqu/cHgZ9/Jel5lhmvQnKQOkg3x1oW6ronELQ0VJVZjkm4RLnV7yGoOBp5MYcdjCdTwutStSbFXTRe6OwgTAio5IWqQ56rzGKp7Q6b1goJssLJg15pH3119XDaXnvf9d8/dUV/0DEO7rkeuQZ8mrgfZegh6g4jOa8I4EPMigPn9Febr4JYERrenLIe5S+vRXnTclD4Xt1xkIlR6P60W5KJNStm9Prq7Mv/NVf8w0ndcd9Oe9+jQxW9We79TV5umyXISeE2Yj/eEqEJgX6+LhnhfVQ2Zq0E6O+M1XzrSzgwWnIEM1vHIpmQcPZvV5Vl+T2CeCV1B5y7iGYdsH1/05FkPja48IduHP/XIri/vqC+7cmIKLShCXAmAFoLYgRQKfXT1UMuBF25RHWdeMUZLfBxPV/zKYYUx/a4HM6mmUxM0eQUOa7QiQp5CsLw13febax4euvyE/wnAli+e0hb81KbFHUeWk39yJc1tS1oWJrGdepRrtDCjpp1x7VuuWyJN65kYJeEOTXXtta78M/0Pjgz/r/4JwCs39v7iHLPnoY+1F12UZ3Oeq7mvSZrMxkVy08p8awtJKybQhe/IPDMbzQXPI9NQGckX33Hu3w0+8FP9M57NX1p0Q4cZvu/k9mxOosyyAHRBybiWDXimxZBCyxFbaUUL47kyWk0nj+QLfn/lg/t+or9wOe5/t7Tr9lP7JipT351fGlvTnUKJ94mnuq/Dzmz6zWjX+OYORNCNeUeqkB2qd29xacdNy+/f9+r/zT/UavmM/lZp9v76nD8shanLOtPpntSFNqzZadGeO7zSW5U/hah7dLi5qU5ns4YzaF8/rzP52sJvvrv3hP1l2v+nz/8IMABBqbSZZcgDWQAAAABJRU5ErkJggg==',
        },
        'name': getattr(settings, 'WEBAPP_MANIFEST_NAME', 'Marketplace'),
        'size': storage.size(package_path),
        'package_path': absolutify(reverse('package.zip')),
        #'release_notes': '',
        'version': '0.0.1',
    })
    manifest_etag = hashlib.md5(manifest_content).hexdigest()

    @etag(lambda r: manifest_etag)
    def _inner_view(request):
        response = HttpResponse(manifest_content,
                                mimetype='application/x-web-app-manifest+json')
        return response

    return _inner_view(request)
Example #23
def package_minifest(request):
    """Serves the mini manifest ("minifest") for the packaged `.zip`."""
    package_path, package_etag = get_package_info()

    manifest_content = json.dumps({
        'description': 'Firefox Marketplace',
        'developer': {
            'name': 'Mozilla',
            'url': 'http://mozilla.org',
        },
        'icons': {
            '64':
            'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADwAAAA8CAYAAAA6/NlyAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA2hpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDowNDgwMTE3NDA3MjA2ODExODIyQUI1QzMyMDQyNjY5NSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo2MDE5RjlGQTIyQjgxMUUyQkUyM0JCNEZBMkI4QTY1RSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo2MDE5RjlGOTIyQjgxMUUyQkUyM0JCNEZBMkI4QTY1RSIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M2IChNYWNpbnRvc2gpIj4gPHhtcE1NOkRlcml2ZWRGcm9tIHN0UmVmOmluc3RhbmNlSUQ9InhtcC5paWQ6RkI3RjExNzQwNzIwNjgxMTgyMkFBQUUyODlFQjEzQUMiIHN0UmVmOmRvY3VtZW50SUQ9InhtcC5kaWQ6MDQ4MDExNzQwNzIwNjgxMTgyMkFCNUMzMjA0MjY2OTUiLz4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz5kNw/pAAAWz0lEQVR42uRbe5Bf1V3/nnPu/f32kc3mBaEbSHaTJaUJJIBxKLYQKEIrThWswAAFO+UxCgp2+IOpY8fRGYs6tRXGBx2RqtMpAh1DEYujghBEHpJAQiAPFpJssiTZ7GY3m939Pe695/h9nd/vtwlQaBPrjL/Mzb2/+zv3nu/j832esyaEAMYYOCGfL/7AgMV3Gwdg8TvNY+QWXoDHuflD5+ABb+AZv//958OJIId5Pa4MX/0wcmPvAJvciec+5sxaPixee2LcKONNKoRJX8i1V8Z9GMB7fwrfu/L+48mwPa4ibGu7F5n9M7BuBrP4HTwekOB1gmd31EFj8WzjM4YO6MfzX8EtT3zjeJJ4XDS88rtbPjk9Xb9n5zPvXISUI9EJn1izzgqzThnhM6sZWLVFEO2iZg1qNvA1HoVqGj+n/3zv8+Vy8rUt1678j58qpD/+N5tXW2e+FRJz8d63R2Fy24gwy5oiJgnhFsysEiw7cw4kbQmas0G+DfMsphsgqxSw4/VxgKkMLDLridEix4OgnsO8s06Cnt65EIrwUlHAndt+7cyXflyGkx/nwcX3b5qFhP9BDuaOhNkCmByeOho74phQq/3I7NzFHThQmbVHmTAy6FHDAy+PgmfN+xmvOnRgmhnGz3l4vLD8wc0P4PC7B25eNfZRaf/INrzoLzadBcG8YgN8BUkl4IKvIYEjk+JpG5yYBkdZxZN2GMLEnCkCD5UjMHJpDP1uidkgYxHj8o6RI5BnPtqgQVnegpeber+9+fwTasM99752o0nMt501beRrUnK6qLF9e8dgYvMQ6jpRSFvRuzosgwNPXtoFJYS0j2FJTRgFB9VqDgd2osCIKYIye2oviubvOcw/51RY2DMbTTtAjlDPyd6LkCPE7xq6ffV9x92GF3xz493ogP7IJQYSdqqGHW6eFbDr2QG6UIZdix3P9NSNe6R9njI0Y2+IzkscGBRGIFBkcq+UQv/Fy9DnWbF78mvs25jxP86L8NXhr5wTjgvDJ31jw9eDtV+lqEKMoqNiZ1ut1GD/hj3obCpNR4XUz4YcysynZbt1fLY8xON/0Y6iATiEeA6SfBTIbIZMkJUcRosJ9E7iDLUMXbPgtDWnofMriSzoOfZvaAre31/z5raxu96f6fdleP7Zd8/3zt3gE/SqfX1ri7ndV1icuEAcG+Q0R6IqY9MAo5MSZpCh36s8C7fBI9CWFBC9UjUngnB8YSXKtLiMwLZKEQq9OFJPOQmJy1kPJTzKidBUzRz8dbgCfrfzUmGc2Dm5Czq720SgeM+zQFDT44f/FQZ2PukQbcYXfzu28Z7xD+WluyYGbzk8Z8k9Bqn0O3dDsctBhvjNbQq5SzXOEkTx2hu4oPI2fGb8UdgOFG4SYQLoEB1SLBaVeuZKki25J+mlkRBEcEW4B+8E7U5QfWFYB2vzXnh2Vp/E572jMLUHoY7GbEKGTOeQIAJsXlyWen8ZahHmjW5tRxd+z4fy0vUi/yVL3pSSASTaoVu0CB+Ls9PLJe8laROVBdwy8U+aKxNsE1a6Qw/LckkpDAV0cHjgdQkFlZL9W0m8SuwLHP/mcAyZDD1DskyQaUeODd/7G+Pr0Kmp0EIujOP8NvdMl8XvjoIa0+VhKq9e+V68HaPh/v4vzx6pHfpZxyHGkNTY4xZke8Qgwt/naHEJPZrDOdm7sCS8AylplLTH8BTHJlr08E66AjZ0rMJ3JDE0tEQvee+a6TehP3tdMIHvIE2T36KxFqddAoNwPiLphVKP2nTBwkdDgwRpJGYtKShI2JuqV885o++62dt2fm/iAxlGs7siBJ+Q1yTJkY5JagkShnzyExiWOFoEvF5b24k2JzBlZ2ZiUWT5vNt9HH51we0Sf0AHQqvPCOKhOz4Nj438JSz1W/GrjDNcTyANFjj7urDYCS8UC5lZ43NhMohGCY2WiSoYlfgvyX1xNU7wwAdCumrNteQA6GGCCUvNNyFDGk882g1q16AE1uQ72DkTJBmm+J+jgyCJzmd70i8elnPmXGNJLuGmyBvaot/eLC2X2sLIXHRN70vQthOU5Jr6GzzO4bMpwVnsFqEv18S4jRkN0jvt3I0faMMLl92S5nl2SbNME+nZKNEIHWY6h54wCaeHt9gm2WM6y8QSbAjSCX6/oHgR5tWmQLMFPR97PbtWhbXFy5AiRSlympA/sMp44vldfW4QluQTaNc4f5Y1mUXBEQqtKoY1jc9NZ9M/t3LJdaX3hXQ+/+Rr
zP63U36QoFSIRDwmFFYTH5dTPR8Y3tfYrUicOBWKtdbkDGXOM0Ai1kI7Dv+Z3wM764vRXhMtGpopKI/H772wB8puguFO4YlgbPE/Tw6cIGs8h8Tr/Btwr1/FjJJSSLMNZn3RyNS48qplbry/99dhN9z33gwn6d2pVOOsXSI8Mg3qcDznuw7OMofhKru+4ZGNapSs0zmjJW3g37tgGla5bWyXtrX+D820myvBgoqNABKojMxP5oWMF+RN8IcvmGfhKb8ItocutVmxY+ubaSkjkuksYLrwd+Lr7jsW0neuL7ePj65g1QWpXphpSWnY5ix/R4/qh+Hr7nHodFUJP0byamIuSamQD6ylxIoTI1skJPAYzT7Jzm1LSKff0hKweaRW01cTewT4PXEM8Q6XwbeSdfCpsB99CMFZa2d1WFbNUEInCung3j6467+6j9GwzWqfX9VZsb59NvKL0itJOojFgoQXFPdp9gicl70FZxeb2EFRZcNpo6acVMawjRtJPTnTZJUL7EkLoQFlr/bShDiVks5TruxVu5IrW6SBeDIY9yiNnJOMw5/YR2Bjx7kYppbDkO9A3VjhE+FCPEPXJzDicLprnqpXrsV575/JsPdXfWnPA1DCOyX8Tzxkit+ReIy5CWuRtOL4N2GWxihzCmXt1rCAjBVfELMuowIwLdkuCdQTk1o5BSuCIJopLGF6J7FZTYQJyaW2ONdvhNWV1xiAGSKwwGpLiooC6ujUMhROhjde8NdcNTWD4Vv/DeO1v0iqOqO2l0iFR54XxGFY5NS1aJaZ5ZgbODaTIDgG0xh0YKRVYyUeN8IwfuHSmOYw0iggRAQ2I81AC8nCyHEBwV4LKG+1puQyrWAhOU9xmpChjo4moYwQpR4wfJLTqwVzPtz2NHnreqJZdR+l5DYyqJ7WISZpTiKctRsdlLampD1FCbkyThMm4nWNVkdSGRrWKvH5/JYCth7IedrlCxJYe7ZrJCM0NkCISAdN1LBoUWfjxS+QcCLTOaGMzAMT74Kuqdoi08AJqUtqHZcp7cj92fiGlzWDL86gQkG05cWpcANOG4rcx3HiqHkC27BZdkyxb6dx02mDku/bpiC2D/oGs/TZMZLDlgHfRAGPM3w4pz0EfRc7OEUXx31CCDkyI40RQZyTuciPUCqs8zMUirC66aW9X8IQo0FGG2+sNa8Ei5GS0xFmNeQYgWVk1katspCEcJqYyKvWCnjp7RxisyMeGwZzqNSDzGOoXlVB03NOTNZaPTtBIDOkyqDCQ3xGo9vL9LAAiR5UAYVs9N7LWhlenHNVEjiWNtrDDO1EiniFsnXChEuksLfadbWc/jUbHDJhwqGEtPvcazmXgi6GGz3o3vqNdTCYYplSyuM5hlKyQyal3SKjiUxkmis4GhcbK9r+cyZpZGmGsz8JURhmWxkO88greFfWF4izsdBCvG12aLiCCSJNLtytb/SpnAoEtHiAcgkm6wBDhwOjxqjPjgf923ckwHhNPDZQ0yFN8W7OjJnItFVza1m5oXhveE6r/Xzf6Io60S1LKPBKhl/QquF2SnWqSbcQxFD1ymSEuMCnMaFVBlkAKpxYKSHRDDOyeXQaGzbXYjiWhATfnWrWpWiEDRuxmEA/asuBnwuJncE0Z2lBHKLU2lbnk44NURDpJGRy0xQFUjPdsU82qxmWfNFJYslsO34Za9gXGYOEFIGziSGI4GQFZ0YzKf49EkZ+sYQOBANpHhwMjRYscc6H8bUXre3k6uepp6cbfa19YwXUUOjl1LKmKAQRaKkiY9NSJ0m1Pzkp1iKtalDtbIyakEQRr/Cn5KuedMb1qo4WDWNkRClUbZtGCMkM7FEQiutgAnllTHvHEn89PxMYf5jPtqcwsDPjeMRawXG9PQ5OPT2BU5Y6WLo4EWTouwd25KwCn4jjMngOrLWC7ZXhyz0wcaBcB7PphYbpRTuUXABRazu0NPXQyvAU5cq1tFOIt02nFWt1Sw081l7z5Y4dnNVCQWYzHI/kzZQyvru34Hq2pMfqT7eh9iltC7D6ghJWSM3fhgYxmbCCu0D2Se0hcmaNRrYIn5IgPltpAHJC09INFUqE9mrSrhrOj7RCmtdJpjE+25ZmhCRCnm3HNOwl/iapDcVHHmeDahe1liD0UECU/k1MFJQs8XNUQGx9pQb1ilRkaZuD9tRjGijvnJq2nDURcgIZed3zez2nXRm7OeKPvbsSaLUzwq0dTVuNenXygTXSsOcqaqqFYX+QThWCdGiJk/GFlq2iAedGh8ZKRsXeNVgt64AhTZo5OFRnItMIN8zzDu4N+riVjAlVVbKxFx/gwJ4cenoTKQS4wJB0kkIUFwVavgotXjOywMk1x+jMN7rennlq15XIbH+T4SLsphkqSblFvVrZtAiAF760GBCP6GUZyNumy9UCn+7vHigQskECkOaGsT0ruVKzEcAM4t09OzL42BLLsIXUa/sW2JZFKkWjjmYX4rSvTT/n2gbm0kqGTNoyowMJGmjacOH3UFiqmDbOZRtdRXSJPpY2sQBo9OBUu/ICHJuJA0EGKSS8+lwNjhyiPlfCGqTMvQ2vyyhIWpXgZjuey9R4x3sl59m0p0YCvPZMlevxmOE1EEXmZVyj0c+/BXGGrbsniO74SDWUBP+539mi4WILnWpEVmjZhgAzm4vU2mn9Hh2ceOlUOow41esvZDBx0Mhim5oBhzKNJ8Y1Fxophsu6OLWCaVXRwpEDAFuer8HKT6bslcFkXCtLH7xpq62kGF1wtOqNudzEG1OuLS5C/XdTw/9y8z4MWu9UbCL9u/iyYBsvYli0rBiAOjQ2w+jJ8PqNlzI4MmKZWdJcGzJRdtSAl+ZcmZrxaJe0lEJHCb+nqeP428YaD6z1IwcMbH8xExNqIEm7ul4984xWkRd643dap8LztCGG/RgyvXVmTyvP/j3Ns1vlQVruMNLhD4k8j5N6Lzl1I5qBwpw8JBJZYH1WGbPQ4aSujg0Bk3gpNZ3I2LlGkOFcWvazGF678rS8g/PkeNSHDWT1giKYFI2mpQ/WquHo4NTvRN9ACilnVdLw06hUvHlTC8M+f2heOnFr+yyUeruDBGNIgmopYUJPiwykBa5eEqP7Uhz3jCVmS2ymriQZRoR6TAQanUwjaIgpKMut0PqWlYbXOImh3pT6QjcL83sUmM0oJKWMPoMQJcdEHV5aZaAgYrC0zzPqUyc8ljoepprCXDOByqw/dExPy+XZ+rkwOe2S0CEVDpaCvMYjpRhhlL5ThcR8JZI8m8RqKOJ+amN5pbUNy0lJTGC40WdZG7y2ZiVc5dBcxSTPGwqFaLtm4Eb7xNSRpD4bVv42l5aQ5feIs/Q2l/UwKl9RACeHcXQA+RPHdC2L9Xf5Uzunt3PTuxQgxYMbeSTdMmq1TIk9EtAG/J1qXXKzBu+bdsNjbWqOaoVqCDLa3wnRBiUskcPyHhpt2RgCSesm2iOltW1GO5xIU2q1lHRMhylxe1SaBuVE62EyoRwRGGBhR2U3PH1H7T370rbUcVOowEZnCoZXYur4khLDmF5CXpoYM85pd0IdivNswxx
Gona9Zi4KV9CaOS5qkDZDFErw0XVwWAw2riuBvpfKTM8LX0ZDMaGM+30UYg3ig3J3ytlLiBz87mmR3BFK099536WW63/43Kv7Ovr2cugjKKc0GULEoW2keE5ju6ZgzdqS4XG0rmlIECUvYQakPAtGmPWqWe+bOx3iqiv3ujhLs+L8gt7TvS0cdqMJEbqohMR56AyukNSW7lGhb70WMMCF/3Dac+gLT7z46AcupuXp/Mtz8rC8Xu+5JqUXWWTcEaRIq9RCoVYiHSXpN4F+l7DmdTuSrgLqVqSgyzfcSpTlXWbcaAbnY/88SCOMowAKV44gmpYGFldQkoMQbQUrx7k60oxH4MwKC4eTbv6RC+JX/vCV1/eWTn/GEWxoDRYZTkKdHQG9zJB2EeokAMKTo/Wk1CvkFdCcTAh8C2R69goHy27AjGsRmkIXjpuNNONhsSSn5d7e6x10r3TcefTUSOdFA1laIYRJI8ELypzgmOYkOkyoyZoWhlALctBq5xCctu1zj7+27kcuiNNnVd/wJft3z5o+JUyWDWe4tHqI8YzStED+NGHvSL2jQPZLjdpEF4iCwpm17GDeSg+9l0mXvf9qXY/Ji5acXTZ3nHZpYG2PbjINs2fBEYNoUpxoZYGveXNMJpUUu+qcFs/qMZGHSV/OczvnPIDBD7cxbdGfj/vxdOnlNUJoXWIdb39AZnmpElM9BpyN4UfaELTyz7sGFZbEbN8vxITBwMhGD7seL2QhCane9c8AB17R7QsY3Hs/62HuWSJiCsJs+5zo6HqRUSdghBba32HZu6lDyAs2meGk/9bPrNs88ZF24l3yj5uffhtW3Ef7KrjYYIiT96sj3DDlK0iieqB0Q1HBl1VgziIP3XicsgY1erkXDTgpWg/tKKByUBe/qDgfKWB8W9yflTHTSz+Xw8Jz69B1qoeuHnGcgFIPvoJjqo2DPXOBdGS0uSVnnknub9X7H7vw4Te+8358feBey0vXvfnbz/7y4k+sKAYvI8ZDqrUnrTAiIyGzsprLBbokDKuvn8TvJW2Z6FpoqLETyqsGqsPUaZhmUVeHS5AuCBqDNEdEinovjTvw6o3dP0ZrX9k1EGRBvS5MEm0IOthV6dl84fcHrvyJ9lqu/cHgZ9/Jel5lhmvQnKQOkg3x1oW6ronELQ0VJVZjkm4RLnV7yGoOBp5MYcdjCdTwutStSbFXTRe6OwgTAio5IWqQ56rzGKp7Q6b1goJssLJg15pH3119XDaXnvf9d8/dUV/0DEO7rkeuQZ8mrgfZegh6g4jOa8I4EPMigPn9Febr4JYERrenLIe5S+vRXnTclD4Xt1xkIlR6P60W5KJNStm9Prq7Mv/NVf8w0ndcd9Oe9+jQxW9We79TV5umyXISeE2Yj/eEqEJgX6+LhnhfVQ2Zq0E6O+M1XzrSzgwWnIEM1vHIpmQcPZvV5Vl+T2CeCV1B5y7iGYdsH1/05FkPja48IduHP/XIri/vqC+7cmIKLShCXAmAFoLYgRQKfXT1UMuBF25RHWdeMUZLfBxPV/zKYYUx/a4HM6mmUxM0eQUOa7QiQp5CsLw13febax4euvyE/wnAli+e0hb81KbFHUeWk39yJc1tS1oWJrGdepRrtDCjpp1x7VuuWyJN65kYJeEOTXXtta78M/0Pjgz/r/4JwCs39v7iHLPnoY+1F12UZ3Oeq7mvSZrMxkVy08p8awtJKybQhe/IPDMbzQXPI9NQGckX33Hu3w0+8FP9M57NX1p0Q4cZvu/k9mxOosyyAHRBybiWDXimxZBCyxFbaUUL47kyWk0nj+QLfn/lg/t+or9wOe5/t7Tr9lP7JipT351fGlvTnUKJ94mnuq/Dzmz6zWjX+OYORNCNeUeqkB2qd29xacdNy+/f9+r/zT/UavmM/lZp9v76nD8shanLOtPpntSFNqzZadGeO7zSW5U/hah7dLi5qU5ns4YzaF8/rzP52sJvvrv3hP1l2v+nz/8IMABBqbSZZcgDWQAAAABJRU5ErkJggg==',
        },
        'name': getattr(settings, 'WEBAPP_MANIFEST_NAME', 'Marketplace'),
        'size': storage.size(package_path),
        'package_path': absolutify(reverse('package.zip')),
        #'release_notes': '',
        'version': '0.0.1',
    })
    manifest_etag = hashlib.md5(manifest_content).hexdigest()

    @etag(lambda r: manifest_etag)
    def _inner_view(request):
        response = HttpResponse(manifest_content,
                                mimetype='application/x-web-app-manifest+json')
        return response

    return _inner_view(request)
Example #24
def convert(directory, delete=False):
    print 'Converting icons in %s' % directory

    pks = []
    k = 0
    for path, names, filenames in walk_storage(directory):
        for filename in filenames:
            old = os.path.join(path, filename)
            pre, ext = os.path.splitext(old)
            if (pre[-3:] in size_suffixes or ext not in extensions):
                continue

            if not storage.size(old):
                print 'Icon %s is empty, ignoring.' % old
                continue

            for size, size_suffix in zip(sizes, size_suffixes):
                new = '%s%s%s' % (pre, size_suffix, '.png')
                if os.path.exists(new):
                    continue
                resize_image(old, new, (size, size), remove_src=False)

            if ext != '.png':
                pks.append(os.path.basename(pre))

            if delete:
                storage.delete(old)

            k += 1
            if not k % 1000:
                print "... converted %s" % k

    for chunk in chunked(pks, 100):
        Webapp.objects.filter(pk__in=chunk).update(icon_type='image/png')
Example #25
    def _filesize(self):
        if self._filesize_stored is not None:
            return self._filesize_stored
        if self.exists():
            self._filesize_stored = default_storage.size(self.path)
            return self._filesize_stored
        return None
Example #26
    def from_upload(cls,
                    upload,
                    version,
                    platform,
                    is_beta=False,
                    parsed_data=None):
        if parsed_data is None:
            parsed_data = {}
        addon = version.addon

        file_ = cls(version=version, platform=platform)
        upload.path = force_bytes(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload.path)
        data = cls.get_jetpack_metadata(upload.path)
        if 'sdkVersion' in data and data['sdkVersion']:
            file_.jetpack_version = data['sdkVersion'][:10]
        file_.no_restart = parsed_data.get('no_restart', False)
        file_.strict_compatibility = parsed_data.get('strict_compatibility',
                                                     False)
        file_.is_multi_package = parsed_data.get('is_multi_package', False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)

        if (is_beta and addon.status == amo.STATUS_PUBLIC
                and version.channel == amo.RELEASE_CHANNEL_LISTED):
            file_.status = amo.STATUS_BETA

        file_.hash = file_.generate_hash(upload.path)
        file_.original_hash = file_.hash

        if upload.validation:
            validation = json.loads(upload.validation)
            if validation['metadata'].get('requires_chrome'):
                file_.requires_chrome = True

        file_.save()
        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions:
                WebextPermission.objects.create(permissions=permissions,
                                                file=file_)

        log.debug('New file: %r from %r' % (file_, upload))
        # Move the uploaded file from the temp location.
        copy_stored_file(
            upload.path,
            os.path.join(version.path_prefix, nfd_str(file_.filename)))

        if upload.validation:
            FileValidation.from_json(file_, validation)

        return file_
Example #27
    def from_upload(cls, upload, version, platform, parsed_data=None):
        """
        Create a File instance from a FileUpload, a Version, a platform id
        and the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results."""
        assert parsed_data is not None

        file_ = cls(version=version, platform=platform)
        upload.path = force_bytes(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload.path)
        data = cls.get_jetpack_metadata(upload.path)
        if 'sdkVersion' in data and data['sdkVersion']:
            file_.jetpack_version = data['sdkVersion'][:10]
        file_.is_restart_required = parsed_data.get('is_restart_required',
                                                    False)
        file_.strict_compatibility = parsed_data.get('strict_compatibility',
                                                     False)
        file_.is_multi_package = parsed_data.get('is_multi_package', False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False)

        file_.hash = file_.generate_hash(upload.path)
        file_.original_hash = file_.hash

        if upload.validation:
            validation = json.loads(upload.validation)
            if validation['metadata'].get('requires_chrome'):
                file_.requires_chrome = True

        file_.save()
        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions:
                WebextPermission.objects.create(permissions=permissions,
                                                file=file_)

        log.debug('New file: %r from %r' % (file_, upload))
        # Move the uploaded file from the temp location.
        copy_stored_file(
            upload.path,
            os.path.join(version.path_prefix, nfd_str(file_.filename)))

        if upload.validation:
            FileValidation.from_json(file_, validation)

        return file_
Example #28
    def from_upload(cls, upload, version, platform, parsed_data=None):
        """
        Create a File instance from a FileUpload, a Version, a platform id
        and the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results."""
        assert parsed_data is not None

        file_ = cls(version=version, platform=platform)
        upload.path = force_bytes(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload.path)
        data = cls.get_jetpack_metadata(upload.path)
        if 'sdkVersion' in data and data['sdkVersion']:
            file_.jetpack_version = data['sdkVersion'][:10]
        file_.is_restart_required = parsed_data.get(
            'is_restart_required', False)
        file_.strict_compatibility = parsed_data.get(
            'strict_compatibility', False)
        file_.is_multi_package = parsed_data.get('is_multi_package', False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False)

        file_.hash = file_.generate_hash(upload.path)
        file_.original_hash = file_.hash

        if upload.validation:
            validation = json.loads(upload.validation)
            if validation['metadata'].get('requires_chrome'):
                file_.requires_chrome = True

        file_.save()
        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions:
                WebextPermission.objects.create(permissions=permissions,
                                                file=file_)

        log.debug('New file: %r from %r' % (file_, upload))
        # Move the uploaded file from the temp location.
        copy_stored_file(
            upload.path,
            os.path.join(version.path_prefix, nfd_str(file_.filename)))

        if upload.validation:
            FileValidation.from_json(file_, validation)

        return file_
Example #29
def sign_file(file_obj):
    """Sign a File if necessary.

    If it's not necessary (file exists but it's a mozilla signed one) then
    return the file directly.

    If there's no endpoint (signing is not enabled), the file isn't reviewed
    yet, or there was an error while signing, raise an exception - it
    shouldn't happen.

    Otherwise proceed with signing and return the signed file.
    """
    from olympia.git.utils import create_git_extraction_entry

    if not settings.ENABLE_ADDON_SIGNING:
        raise SigningError('Not signing file {0}: no active endpoint'.format(
            file_obj.pk))

    # No file? No signature.
    if not os.path.exists(file_obj.current_file_path):
        raise SigningError("File {0} doesn't exist on disk".format(
            file_obj.current_file_path))

    # Don't sign Mozilla signed extensions (they're already signed).
    if file_obj.is_mozilla_signed_extension:
        # Don't raise an exception here, just log and return file_obj even
        # though we didn't sign, it's not an error - we just don't need to do
        # anything in this case.
        log.info('Not signing file {0}: mozilla signed extension is already '
                 'signed'.format(file_obj.pk))
        return file_obj

    # We only sign files that are compatible with Firefox.
    if not supports_firefox(file_obj):
        raise SigningError(
            'Not signing version {0}: not for a Firefox version we support'.
            format(file_obj.version.pk))

    # Sign the file. If there's any exception, we skip the rest.
    cert_serial_num = str(call_signing(file_obj))

    size = storage.size(file_obj.current_file_path)

    # Save the certificate serial number for revocation if needed, and re-hash
    # the file now that it's been signed.
    file_obj.update(
        cert_serial_num=cert_serial_num,
        hash=file_obj.generate_hash(),
        is_signed=True,
        size=size,
    )
    log.info('Signing complete for file {0}'.format(file_obj.pk))

    if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
        # Schedule this version for git extraction.
        transaction.on_commit(
            lambda: create_git_extraction_entry(version=file_obj.version))

    return file_obj
Example #30
def sign_file(file_obj):
    """Sign a File if necessary.

    If it's not necessary (file exists but it's a mozilla signed one, or it's
    a search plugin) then return the file directly.

    If there's no endpoint (signing is not enabled), the file isn't reviewed
    yet, or there was an error while signing, raise an exception - it
    shouldn't happen.

    Otherwise proceed with signing and return the signed file.
    """
    if (file_obj.version.addon.type == amo.ADDON_SEARCH
            and file_obj.version.is_webextension is False):
        # Those aren't meant to be signed, we shouldn't be here.
        return file_obj

    if not settings.ENABLE_ADDON_SIGNING:
        raise SigningError(u'Not signing file {0}: no active endpoint'.format(
            file_obj.pk))

    # No file? No signature.
    if not os.path.exists(file_obj.current_file_path):
        raise SigningError(u'File {0} doesn\'t exist on disk'.format(
            file_obj.current_file_path))

    # Don't sign Mozilla signed extensions (they're already signed).
    if file_obj.is_mozilla_signed_extension:
        # Don't raise an exception here, just log and return file_obj even
        # though we didn't sign, it's not an error - we just don't need to do
        # anything in this case.
        log.info(u'Not signing file {0}: mozilla signed extension is already '
                 u'signed'.format(file_obj.pk))
        return file_obj

    # Don't sign multi-package XPIs. Their inner add-ons need to be signed.
    if file_obj.is_multi_package:
        raise SigningError(u'Not signing file {0}: multi-package XPI'.format(
            file_obj.pk))

    # We only sign files that are compatible with Firefox.
    if not supports_firefox(file_obj):
        raise SigningError(
            u'Not signing version {0}: not for a Firefox version we support'.
            format(file_obj.version.pk))

    # Sign the file. If there's any exception, we skip the rest.
    cert_serial_num = six.text_type(call_signing(file_obj))

    size = storage.size(file_obj.current_file_path)

    # Save the certificate serial number for revocation if needed, and re-hash
    # the file now that it's been signed.
    file_obj.update(cert_serial_num=cert_serial_num,
                    hash=file_obj.generate_hash(),
                    is_signed=True,
                    size=size)
    log.info(u'Signing complete for file {0}'.format(file_obj.pk))
    return file_obj
Ejemplo n.º 31
0
    def from_upload(cls, upload, version, platform, is_beta=False,
                    parsed_data=None):
        if parsed_data is None:
            parsed_data = {}
        addon = version.addon

        file_ = cls(version=version, platform=platform)
        upload.path = force_bytes(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload.path)
        data = cls.get_jetpack_metadata(upload.path)
        if 'sdkVersion' in data and data['sdkVersion']:
            file_.jetpack_version = data['sdkVersion'][:10]
        file_.is_restart_required = parsed_data.get(
            'is_restart_required', False)
        file_.strict_compatibility = parsed_data.get(
            'strict_compatibility', False)
        file_.is_multi_package = parsed_data.get('is_multi_package', False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False)

        if (is_beta and addon.status == amo.STATUS_PUBLIC and
                version.channel == amo.RELEASE_CHANNEL_LISTED):
            file_.status = amo.STATUS_BETA

        file_.hash = file_.generate_hash(upload.path)
        file_.original_hash = file_.hash

        if upload.validation:
            validation = json.loads(upload.validation)
            if validation['metadata'].get('requires_chrome'):
                file_.requires_chrome = True

        file_.save()
        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions:
                WebextPermission.objects.create(permissions=permissions,
                                                file=file_)

        log.debug('New file: %r from %r' % (file_, upload))
        # Move the uploaded file from the temp location.
        copy_stored_file(
            upload.path,
            os.path.join(version.path_prefix, nfd_str(file_.filename)))

        if upload.validation:
            FileValidation.from_json(file_, validation)

        return file_
Ejemplo n.º 32
0
    def from_upload(cls, upload, version, platform, is_beta=False,
                    parse_data={}):
        addon = version.addon

        file_ = cls(version=version, platform=platform)
        upload.path = amo.utils.smart_path(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload.path)
        data = cls.get_jetpack_metadata(upload.path)
        if 'sdkVersion' in data and data['sdkVersion']:
            file_.jetpack_version = data['sdkVersion'][:10]
        if file_.jetpack_version:
            Tag(tag_text='jetpack').save_tag(addon)
        file_.builder_version = data['builderVersion']
        file_.no_restart = parse_data.get('no_restart', False)
        file_.strict_compatibility = parse_data.get('strict_compatibility',
                                                    False)
        file_.is_multi_package = parse_data.get('is_multi_package', False)

        if is_beta and addon.status == amo.STATUS_PUBLIC:
            file_.status = amo.STATUS_BETA
        elif addon.trusted:
            # New files in trusted add-ons automatically receive the correct
            # approved status for their review class.
            if addon.status == amo.STATUS_PUBLIC:
                file_.status = amo.STATUS_PUBLIC
            elif addon.status in amo.LITE_STATUSES:
                file_.status = amo.STATUS_LITE

        file_.hash = file_.generate_hash(upload.path)

        if upload.validation:
            validation = json.loads(upload.validation)
            if validation['metadata'].get('requires_chrome'):
                file_.requires_chrome = True

        file_.save()

        log.debug('New file: %r from %r' % (file_, upload))
        # Move the uploaded file from the temp location.
        destinations = [version.path_prefix]
        if file_.status in amo.MIRROR_STATUSES:
            destinations.append(version.mirror_path_prefix)
        for dest in destinations:
            copy_stored_file(upload.path,
                             os.path.join(dest, nfd_str(file_.filename)))

        if upload.validation:
            # Import loop.
            from devhub.tasks import annotate_validation_results

            validation = annotate_validation_results(validation)
            FileValidation.from_json(file_, validation)

        return file_
Ejemplo n.º 33
0
 def testGzippedSize(self):
     content = "foo" * 4096
     with self.settings(AWS_S3_GZIP=False):
         name = "foo/bar.txt"
         with self.save_file(name=name, content=content):
             meta = default_storage.meta(name)
             self.assertNotEqual(meta.get("ContentEncoding", ""), "gzip")
             self.assertNotIn("uncompressed_size", meta["Metadata"])
             self.assertEqual(default_storage.size(name), len(content))
     with self.settings(AWS_S3_GZIP=True):
         name = "foo/bar.txt.gz"
         with self.save_file(name=name, content=content):
             meta = default_storage.meta(name)
             self.assertEqual(meta["ContentEncoding"], "gzip")
             self.assertIn("uncompressed_size", meta["Metadata"])
             self.assertEqual(meta["Metadata"], {"uncompressed_size": str(len(content))})
             self.assertEqual(default_storage.size(name), len(content))
Ejemplo n.º 34
0
 def test_byte_contents_when_closing_file(self):
     filename = u'filebytes\u00A3.jpg'
     file = default_storage.open(filename, 'wb')
     file.write(DUMMY_IMAGE)
     file.close()
     self.assertEqual(default_storage.size(filename), file.size)
     default_storage.delete(filename)
     self.assertFalse(default_storage.exists(filename))
Ejemplo n.º 35
0
def index(request):
    filenames = default_storage.listdir('')[1]
    file_list = [{
        'filename': filename,
        'filesize': default_storage.size(filename)
    } for filename in filenames]

    return render(request, 'browse/index.html', {'file_list': file_list})
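
Views like the one above often show sizes to people rather than raw byte counts; a small standalone formatter (illustrative only, not part of the original view) could be applied to each default_storage.size() result:

def human_readable_size(num_bytes):
    """Illustrative helper: format a byte count as e.g. '1.4 MB'."""
    size = float(num_bytes)
    for unit in ('B', 'KB', 'MB', 'GB'):
        if size < 1024:
            return '%.1f %s' % (size, unit)
        size /= 1024
    return '%.1f TB' % size
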
Ejemplo n.º 36
0
def generate_response(filepath, content_type, filename=None):
    filename = filename or os.path.basename(filepath)
    response = StreamingHttpResponse(export_iterator(filepath), content_type=content_type)
    response['Content-Length'] = default_storage.size(filepath)
    response['Content-Disposition'] = "attachment; filename=%s" % filename
    response.set_cookie(key='fileDownload', value="true")
    response.set_cookie(key='path', value="/")
    return response
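
A possible caller for the helper above (the view name and content-type fallback are assumptions, not from the original code), using the standard library to guess the type from the filename:

import mimetypes

def download_export(request, filepath):
    # Sketch: serve a stored export through generate_response(), falling
    # back to a generic binary type when the extension is unknown.
    content_type = (mimetypes.guess_type(filepath)[0] or
                    'application/octet-stream')
    return generate_response(filepath, content_type)
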
Ejemplo n.º 37
0
    def from_upload(cls, upload, version, platform, is_beta=False,
                    parse_data={}):
        addon = version.addon

        file_ = cls(version=version, platform=platform)
        upload.path = smart_path(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload.path)
        data = cls.get_jetpack_metadata(upload.path)
        if 'sdkVersion' in data and data['sdkVersion']:
            file_.jetpack_version = data['sdkVersion'][:10]
        if file_.jetpack_version:
            Tag(tag_text='jetpack').save_tag(addon)
        file_.no_restart = parse_data.get('no_restart', False)
        file_.strict_compatibility = parse_data.get('strict_compatibility',
                                                    False)
        file_.is_multi_package = parse_data.get('is_multi_package', False)
        file_.is_experiment = parse_data.get('is_experiment', False)
        file_.is_webextension = parse_data.get('is_webextension', False)

        if is_beta and addon.status == amo.STATUS_PUBLIC:
            file_.status = amo.STATUS_BETA

        file_.hash = file_.generate_hash(upload.path)
        file_.original_hash = file_.hash

        if upload.validation:
            validation = json.loads(upload.validation)
            if validation['metadata'].get('requires_chrome'):
                file_.requires_chrome = True

        file_.save()

        log.debug('New file: %r from %r' % (file_, upload))
        # Move the uploaded file from the temp location.
        destinations = [version.path_prefix]
        if file_.status in amo.MIRROR_STATUSES:
            destinations.append(version.mirror_path_prefix)
        for dest in destinations:
            copy_stored_file(upload.path,
                             os.path.join(dest, nfd_str(file_.filename)))

        if upload.validation:
            # Import loop.
            from olympia.devhub.tasks import annotate_validation_results
            from olympia.devhub.utils import ValidationAnnotator

            validation = annotate_validation_results(validation)
            FileValidation.from_json(file_, validation)

            # Copy annotations from any previously approved file.
            ValidationAnnotator(file_).update_annotations()

        return file_
Ejemplo n.º 38
0
def sign_file(file_obj, use_autograph=False):
    """Sign a File.

    If there's no endpoint (signing is not enabled), or the file is a hotfix,
    or isn't reviewed yet, or there was an error while signing, log and return
    nothing.

    Otherwise return the signed file.
    """
    if not settings.SIGNING_SERVER or not settings.ENABLE_ADDON_SIGNING:
        log.info(u'Not signing file {0}: no active endpoint'.format(
            file_obj.pk))
        return

    # No file? No signature.
    if not os.path.exists(file_obj.file_path):
        log.info(u'File {0} doesn\'t exist on disk'.format(file_obj.file_path))
        return

    # Don't sign hotfixes.
    if file_obj.version.addon.guid in settings.HOTFIX_ADDON_GUIDS:
        log.info(u'Not signing file {0}: addon is a hotfix'.format(
            file_obj.pk))
        return

    # Don't sign Mozilla signed extensions (they're already signed).
    if file_obj.is_mozilla_signed_extension:
        log.info(u'Not signing file {0}: mozilla signed extension is already '
                 u'signed'.format(file_obj.pk))
        return

    # Don't sign multi-package XPIs. Their inner add-ons need to be signed.
    if file_obj.is_multi_package:
        log.info(u'Not signing file {0}: multi-package XPI'.format(
            file_obj.pk))
        return

    # We only sign files that are compatible with Firefox.
    if not supports_firefox(file_obj):
        log.info(
            u'Not signing version {0}: not for a Firefox version we support'.
            format(file_obj.version.pk))
        return

    # Sign the file. If there's any exception, we skip the rest.
    cert_serial_num = unicode(
        call_signing(file_obj, use_autograph=use_autograph))

    size = storage.size(file_obj.file_path)

    # Save the certificate serial number for revocation if needed, and re-hash
    # the file now that it's been signed.
    file_obj.update(cert_serial_num=cert_serial_num,
                    hash=file_obj.generate_hash(),
                    is_signed=True,
                    size=size)
    log.info(u'Signing complete for file {0}'.format(file_obj.pk))
    return file_obj
Ejemplo n.º 39
0
def handle_upload_validation_result(results, upload_pk, channel):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)

    if not upload.addon_id:
        results = annotate_new_legacy_addon_restrictions(results=results)
    elif upload.addon_id and upload.version:
        results = annotate_webext_incompatibilities(
            results=results, file_=None, addon=upload.addon,
            version_string=upload.version, channel=channel)

    results = skip_signing_warning_if_signing_server_not_configured(results)
    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file maybe gets moved but it needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'
             .format(delta=delta, upload=upload.pk,
                     created=upload.created, now=now,
                     scaled=scaled_delta, size_in_mb=size_in_mb))
Ejemplo n.º 40
0
def handle_upload_validation_result(results, upload_pk, channel, is_mozilla_signed):
    """Annotate a set of validation results and save them to the given
    FileUpload instance.
    """
    upload = FileUpload.objects.get(pk=upload_pk)
    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file maybe gets moved but it needs more investigation.
        log.warning(
            'Scaled upload stats were not tracked. File is '
            'missing: {}'.format(upload.path)
        )
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta
    )

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb', scaled_delta)

    log.info(
        'Time to process and save upload validation; '
        'upload.pk={upload}; processing_time={delta}; '
        'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
        'created={created}; now={now}'.format(
            delta=delta,
            upload=upload.pk,
            created=upload.created,
            now=now,
            scaled=scaled_delta,
            size_in_mb=size_in_mb,
        )
    )
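
To make the per-MB scaling above concrete, here is the same arithmetic isolated in a small sketch (the function name is an assumption; the task itself does this inline):

from decimal import Decimal

def scale_processing_time(delta_ms, size_in_bytes):
    """Sketch of the scaling rule used above: uploads at or under 1MB are
    reported as-is, larger ones are divided by their size in megabytes."""
    megabyte = Decimal(1024 * 1024)
    size = Decimal(size_in_bytes)
    if size <= 0:
        return None
    unit = size / megabyte if size > megabyte else Decimal(1)
    return Decimal(delta_ms) / unit
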
Ejemplo n.º 41
0
def sign_file(file_obj):
    """Sign a File.

    If there's no endpoint (signing is not enabled), or the file is a hotfix,
    or isn't reviewed yet, or there was an error while signing, log and return
    nothing.

    Otherwise return the signed file.
    """
    if not settings.SIGNING_SERVER or not settings.ENABLE_ADDON_SIGNING:
        log.info(u'Not signing file {0}: no active endpoint'.format(
            file_obj.pk))
        return

    # No file? No signature.
    if not os.path.exists(file_obj.file_path):
        log.info(u'File {0} doesn\'t exist on disk'.format(file_obj.file_path))
        return

    # Don't sign hotfixes.
    if file_obj.version.addon.guid in settings.HOTFIX_ADDON_GUIDS:
        log.info(u'Not signing file {0}: addon is a hotfix'.format(
            file_obj.pk))
        return

    # Don't sign Mozilla signed extensions (they're already signed).
    if file_obj.is_mozilla_signed_extension:
        log.info(u'Not signing file {0}: mozilla signed extension is already '
                 u'signed'.format(file_obj.pk))
        return

    # Don't sign multi-package XPIs. Their inner add-ons need to be signed.
    if file_obj.is_multi_package:
        log.info(u'Not signing file {0}: multi-package XPI'.format(
            file_obj.pk))
        return

    # We only sign files that are compatible with Firefox.
    if not supports_firefox(file_obj):
        log.info(
            u'Not signing version {0}: not for a Firefox version we support'
            .format(file_obj.version.pk))
        return

    # Sign the file. If there's any exception, we skip the rest.
    cert_serial_num = unicode(call_signing(file_obj))

    size = storage.size(file_obj.file_path)

    # Save the certificate serial number for revocation if needed, and re-hash
    # the file now that it's been signed.
    file_obj.update(cert_serial_num=cert_serial_num,
                    hash=file_obj.generate_hash(),
                    is_signed=True,
                    size=size)
    log.info(u'Signing complete for file {0}'.format(file_obj.pk))
    return file_obj
Ejemplo n.º 42
0
def generate_response(filepath, content_type, filename=None):
    filename = filename or os.path.basename(filepath)
    response = StreamingHttpResponse(export_iterator(filepath),
                                     content_type=content_type)
    response['Content-Length'] = default_storage.size(filepath)
    response['Content-Disposition'] = "attachment; filename=%s" % filename
    response.set_cookie(key='fileDownload', value="true")
    response.set_cookie(key='path', value="/")
    return response
Ejemplo n.º 43
0
    def test_extract_header_img(self):
        data = {'images': {'headerURL': 'weta.png'}}
        dest_path = tempfile.mkdtemp()
        header_file = dest_path + '/weta.png'
        assert not default_storage.exists(header_file)

        utils.extract_header_img(self.file_obj, data, dest_path)
        assert default_storage.exists(header_file)
        assert default_storage.size(header_file) == 126447
Ejemplo n.º 44
0
 def item_enclosure_length(self, item):
     """Try to obtains the size of the enclosure if the image
     is present on the FS, otherwise returns an hardcoded value"""
     if item.image:
         try:
             return str(default_storage.size(item.image.path))
         except (os.error, NotImplementedError):
             pass
     return '100000'
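
Note that item.image.path raises NotImplementedError on storage backends without a local filesystem, which is why the except clause above falls back to the hardcoded value. A variant that also works on remote storages could ask the storage for the size by name instead; a sketch under that assumption:

def item_enclosure_length(self, item):
    """Like the method above, but queries the storage by name so it also
    works on backends that don't implement .path (e.g. S3)."""
    if item.image:
        try:
            return str(default_storage.size(item.image.name))
        except (OSError, NotImplementedError):
            pass
    return '100000'
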
Ejemplo n.º 46
0
    def from_upload(cls, upload, version, platform, is_beta=False,
                    parsed_data=None):
        if parsed_data is None:
            parsed_data = {}
        addon = version.addon

        file_ = cls(version=version, platform=platform)
        upload.path = force_bytes(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload.path)
        data = cls.get_jetpack_metadata(upload.path)
        if 'sdkVersion' in data and data['sdkVersion']:
            file_.jetpack_version = data['sdkVersion'][:10]
        file_.no_restart = parsed_data.get('no_restart', False)
        file_.strict_compatibility = parsed_data.get('strict_compatibility',
                                                     False)
        file_.is_multi_package = parsed_data.get('is_multi_package', False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)

        if (is_beta and addon.status == amo.STATUS_PUBLIC and
                version.channel == amo.RELEASE_CHANNEL_LISTED):
            file_.status = amo.STATUS_BETA

        file_.hash = file_.generate_hash(upload.path)
        file_.original_hash = file_.hash

        if upload.validation:
            validation = json.loads(upload.validation)
            if validation['metadata'].get('requires_chrome'):
                file_.requires_chrome = True

        file_.save()

        log.debug('New file: %r from %r' % (file_, upload))
        # Move the uploaded file from the temp location.
        copy_stored_file(
            upload.path,
            os.path.join(version.path_prefix, nfd_str(file_.filename)))

        if upload.validation:
            # Import loop.
            from olympia.devhub.tasks import annotate_validation_results
            from olympia.devhub.utils import ValidationAnnotator

            validation = annotate_validation_results(validation)
            FileValidation.from_json(file_, validation)

            # Copy annotations from any previously approved file.
            ValidationAnnotator(file_).update_annotations()

        return file_
Ejemplo n.º 47
0
 def get(self, request, *args, **kwargs):
     path = self.get_path()
     try:
         requested_file = default_storage.open(path)
     except IOError:
         return page_not_found(request)
     response = HttpResponse(FileWrapper(requested_file),
                             content_type=guess_type(path)[0])
     response["Content-Disposition"] = "attachment"
     response["Content-Length"] = default_storage.size(path)
     return response
Ejemplo n.º 48
0
def upload(request):
    print(request.session['user'])
    if request.POST:
        print('VIEWS: In upload')
        headers = {'content-type': 'application/json'}
        contentName = request.POST['name']
        subCategory = request.POST['sub_category']
        description = request.POST['description']
        file = request.FILES['fileUpload']
        print('file is posted from html')
        fileContent = file.read()
        print('file read')
        path = default_storage.save(file.name, ContentFile(fileContent))
        print('file is saved')
        print(path)
        print(default_storage.size(path))

        if path.find('pdf') >= 0:
            print('pdf uploaded')
            permalink = 'https://lh5.googleusercontent.com/-0ccxnhKgDkI/U0H2ASDlXyI/AAAAAAAABPw/f3TnC_FBgv0/s256-no/pdf-bbrfoundation.org_.png'
        elif path.find('mp4') >= 0:
            print('video uploaded')
            permalink = 'https://lh3.googleusercontent.com/-KRxBQzU8i2w/U0H19HjfN_I/AAAAAAAABPY/-wRt_DnBh0Q/s512-no/HiRes.jpg'
        else:
            print('general file uploaded')
            permalink = 'http://www4.uwsp.edu/education/lwilson/newstuff/graphics/MCj03825740000[1].jpg'
        # print(default_storage.open(path).read())

        user = request.session['user']
        username = user['username']

        payload = {'Name': contentName, 'fileName': path,
                   'Description': description, 'sub_category': subCategory,
                   'prof_username': username, 'link': '', 'Feedback': [],
                   'RatingCount': 0, 'AverageRating': 0, 'NoOfPeopleRated': 0,
                   'Type': '', 'permalink': permalink, 'count': 0,
                   'Questions': []}

        print(payload)
        status = requests.post(url='http://localhost:8080/uploadContent',
                               data=json.dumps(payload), headers=headers)
        print(status.status_code)
        return HttpResponseRedirect("/professorDashboard")
    return render_to_response('uploadContent.html',
                              context_instance=RequestContext(request))
Ejemplo n.º 49
0
def recalc_hash(request, file_id):

    file = get_object_or_404(File, pk=file_id)
    file.size = storage.size(file.file_path)
    file.hash = file.generate_hash()
    file.save()

    log.info("Recalculated hash for file ID %d" % file.id)
    messages.success(request, "File hash and size recalculated for file %d." % file.id)
    return {"success": 1}
Ejemplo n.º 50
0
def test_recreate_previews(pngcrush_image_mock):
    addon = addon_factory()
    # Set up the preview so it has files in the right places.
    preview_no_original = Preview.objects.create(addon=addon)
    with storage.open(preview_no_original.image_path, 'wb') as dest:
        shutil.copyfileobj(open(get_image_path('preview_landscape.jpg'), 'rb'),
                           dest)
    with storage.open(preview_no_original.thumbnail_path, 'wb') as dest:
        shutil.copyfileobj(open(get_image_path('mozilla.png'), 'rb'), dest)
    # And again but this time with an "original" image.
    preview_has_original = Preview.objects.create(addon=addon)
    with storage.open(preview_has_original.image_path, 'wb') as dest:
        shutil.copyfileobj(open(get_image_path('preview_landscape.jpg'), 'rb'),
                           dest)
    with storage.open(preview_has_original.thumbnail_path, 'wb') as dest:
        shutil.copyfileobj(open(get_image_path('mozilla.png'), 'rb'), dest)
    with storage.open(preview_has_original.original_path, 'wb') as dest:
        shutil.copyfileobj(open(get_image_path('teamaddons.jpg'), 'rb'), dest)

    tasks.recreate_previews([addon.id])

    assert preview_no_original.reload().sizes == {
        'image': [533, 400],
        'thumbnail': [533, 400]
    }
    # Check no resize for full size, but resize happened for thumbnail
    assert (storage.size(preview_no_original.image_path) == storage.size(
        get_image_path('preview_landscape.jpg')))
    assert (storage.size(preview_no_original.thumbnail_path) != storage.size(
        get_image_path('mozilla.png')))

    assert preview_has_original.reload().sizes == {
        'image': [2400, 1600],
        'thumbnail': [640, 427],
        'original': [3000, 2000]
    }
    # Check both full and thumbnail changed, but original didn't.
    assert (storage.size(preview_has_original.image_path) != storage.size(
        get_image_path('preview_landscape.jpg')))
    assert (storage.size(preview_has_original.thumbnail_path) != storage.size(
        get_image_path('mozilla.png')))
    assert (storage.size(preview_has_original.original_path) == storage.size(
        get_image_path('teamaddons.jpg')))
Ejemplo n.º 51
0
def descargar(request,archivoo):    
    try:
        archivo = default_storage.open(archivoo)
    except:
        raise Http404
    reporte = HttpResponse(content_type='application/octet-stream')
    reporte['Content-Disposition'] = 'attachment; filename="%s"'%archivo.name[archivo.name.rfind('/')+1:]
    reporte['Content-Length'] = default_storage.size(archivo.name)
    reporte.write(archivo.read())
    return reporte
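
The view above buffers the whole file in memory before responding; for large files a streaming variant is usually preferable. A sketch using Django's FileResponse (the function name is an assumption, the logic otherwise mirrors the original):

import os

from django.core.files.storage import default_storage
from django.http import FileResponse, Http404


def descargar_streaming(request, archivoo):
    # Sketch: stream the stored file instead of reading it into memory.
    if not default_storage.exists(archivoo):
        raise Http404
    archivo = default_storage.open(archivoo, 'rb')
    response = FileResponse(archivo, content_type='application/octet-stream')
    response['Content-Disposition'] = (
        'attachment; filename="%s"' % os.path.basename(archivo.name))
    response['Content-Length'] = default_storage.size(archivoo)
    return response
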
Ejemplo n.º 52
0
    def studio_submit(self, request, suffix=''):
        self.display_name = request.params['display_name']
        self.width = request.params['width']
        self.height = request.params['height']
        # self.has_score = request.params['has_score']
        self.icon_class = 'problem'

        # if self.has_score == 'True' else 'video'

        if hasattr(request.params['file'], 'file'):
            scorm_file = request.params['file'].file

            # First, save scorm file in the storage for mobile clients
            self.scorm_file_meta['sha1'] = self.get_sha1(scorm_file)
            self.scorm_file_meta['name'] = scorm_file.name
            self.scorm_file_meta['path'] = path = self._file_storage_path()
            self.scorm_file_meta['last_updated'] = timezone.now().strftime(
                DateTime.DATETIME_FORMAT)

            if default_storage.exists(path):
                log.info('Removing previously uploaded "{}"'.format(path))
                default_storage.delete(path)

            default_storage.save(path, File(scorm_file))
            self.scorm_file_meta['size'] = default_storage.size(path)
            log.info('"{}" file stored at "{}"'.format(scorm_file, path))

            # Check whether SCORM_ROOT exists
            if not os.path.exists(SCORM_ROOT):
                os.mkdir(SCORM_ROOT)

            # Now unpack it into SCORM_ROOT to serve to students later
            path_to_file = os.path.join(SCORM_ROOT, self.location.block_id)

            if os.path.exists(path_to_file):
                shutil.rmtree(path_to_file)

            if hasattr(scorm_file, 'temporary_file_path'):
                os.system('unzip {} -d {}'.format(
                    scorm_file.temporary_file_path(), path_to_file))
            else:
                temporary_path = os.path.join(SCORM_ROOT, scorm_file.name)
                temporary_zip = open(temporary_path, 'wb')
                scorm_file.open()
                temporary_zip.write(scorm_file.read())
                temporary_zip.close()
                os.system('unzip {} -d {}'.format(temporary_path,
                                                  path_to_file))
                os.remove(temporary_path)

            self.set_fields_xblock(path_to_file)

        return Response(json.dumps({'result': 'success'}),
                        content_type='application/json')
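
The handler above relies on helpers such as self.get_sha1() and self._file_storage_path() that are defined elsewhere in the XBlock and not shown here. For illustration only, one plausible shape for the hashing helper (an assumption, not the XBlock's actual code):

import hashlib

def get_sha1(file_descriptor):
    """Illustrative only: hash an uploaded file in chunks, then rewind it so
    it can still be saved to storage afterwards."""
    sha1 = hashlib.sha1()
    for chunk in iter(lambda: file_descriptor.read(8192), b''):
        sha1.update(chunk)
    file_descriptor.seek(0)
    return sha1.hexdigest()
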
Ejemplo n.º 53
0
 def get_upload_file(self, ext):
     """Return an upload file to test the form."""
     filename = self.get_course_zip()
     return InMemoryUploadedFile(
         file=default_storage.open(filename, "rb"),
         field_name="course_file",
         name="toy{0}".format(ext),
         content_type="application/zip",
         size=default_storage.size(filename),
         charset="utf-8",
     )
Ejemplo n.º 54
0
 def get(self, request, *args, **kwargs):
     path = self.get_path()
     try:
         requested_file = default_storage.open(path)
     except IOError:
         return page_not_found(request)
     response = HttpResponse(FileWrapper(requested_file),
                             content_type=guess_type(path)[0])
     response['Content-Disposition'] = 'attachment'
     response['Content-Length'] = default_storage.size(path)
     return response
Ejemplo n.º 55
0
def sign_file(file_obj):
    """Sign a File if necessary.

    If it's not necessary (file exists but it's a mozilla signed one, or it's
    a search plugin) then return the file directly.

    If there's no endpoint (signing is not enabled), the file isn't reviewed
    yet, or there was an error while signing, raise an exception - it
    shouldn't happen.

    Otherwise proceed with signing and return the signed file.
    """
    if (file_obj.version.addon.type == amo.ADDON_SEARCH and
            file_obj.version.is_webextension is False):
        # Those aren't meant to be signed, we shouldn't be here.
        return file_obj

    if not settings.ENABLE_ADDON_SIGNING:
        raise SigningError(u'Not signing file {0}: no active endpoint'.format(
            file_obj.pk))

    # No file? No signature.
    if not os.path.exists(file_obj.current_file_path):
        raise SigningError(u'File {0} doesn\'t exist on disk'.format(
            file_obj.current_file_path))

    # Don't sign Mozilla signed extensions (they're already signed).
    if file_obj.is_mozilla_signed_extension:
        # Don't raise an exception here, just log and return file_obj even
        # though we didn't sign, it's not an error - we just don't need to do
        # anything in this case.
        log.info(u'Not signing file {0}: mozilla signed extension is already '
                 u'signed'.format(file_obj.pk))
        return file_obj

    # We only sign files that are compatible with Firefox.
    if not supports_firefox(file_obj):
        raise SigningError(
            u'Not signing version {0}: not for a Firefox version we support'
            .format(file_obj.version.pk))

    # Sign the file. If there's any exception, we skip the rest.
    cert_serial_num = six.text_type(call_signing(file_obj))

    size = storage.size(file_obj.current_file_path)

    # Save the certificate serial number for revocation if needed, and re-hash
    # the file now that it's been signed.
    file_obj.update(cert_serial_num=cert_serial_num,
                    hash=file_obj.generate_hash(),
                    is_signed=True,
                    size=size)
    log.info(u'Signing complete for file {0}'.format(file_obj.pk))
    return file_obj
Ejemplo n.º 56
0
def test_extract_header_img():
    file_obj = os.path.join(
        settings.ROOT, 'src/olympia/devhub/tests/addons/static_theme.zip')
    data = {'images': {'headerURL': 'weta.png'}}
    dest_path = tempfile.mkdtemp()
    header_file = dest_path + '/weta.png'
    assert not default_storage.exists(header_file)

    utils.extract_header_img(file_obj, data, dest_path)
    assert default_storage.exists(header_file)
    assert default_storage.size(header_file) == 126447
Ejemplo n.º 57
0
def get_fileinfo(f):
    filename = os.path.split(f)[1]
    filetype = get_filetype(filename)
    return {
        'name': filename,
        'type': filetype,
        'preview': filetype in ('image', 'audio', 'video'),
        'size': default_storage.size(f),
        'date': default_storage.get_modified_time(f),
        'url': default_storage.url(f),
    }
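
A possible caller for the helper above (hypothetical, not part of the original module), pairing it with default_storage.listdir() to describe every file directly under a prefix:

def list_fileinfo(prefix=''):
    # Sketch: `prefix` is expected to be '' or end with '/'.
    _directories, files = default_storage.listdir(prefix)
    return [get_fileinfo(prefix + name) for name in files]
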
Ejemplo n.º 58
0
def recalc_hash(request, file_id):

    file = get_object_or_404(File, pk=file_id)
    file.size = storage.size(file.file_path)
    file.hash = file.generate_hash()
    file.save()

    log.info('Recalculated hash for file ID %d' % file.id)
    messages.success(request,
                     'File hash and size recalculated for file %d.' % file.id)
    return {'success': 1}