Example No. 1
0
    def sign_file(self):
        """Sign the original file (`file_path`) and write the signed copy to
        `signed_file_path` on public storage; the original file stays on
        private storage.

        Returns the size of the signed file.

        Raises SigningError when the extension uuid or the version pk is
        missing, or when the signing service itself fails.
        """
        if not self.extension.uuid:
            raise SigningError('Need uuid to be set to sign')
        if not self.pk:
            raise SigningError('Need version pk to be set to sign')

        # The signing service wants a stable unique 'id' (never shared with
        # other extensions, langpacks, webapps...) that survives updates, and
        # a monotonically increasing integer 'version'.
        signing_ids = json.dumps({
            'id': self.extension.uuid,
            'version': self.pk,
        })
        with statsd.timer('extensions.sign'):
            try:
                # Reads from self.file_path, generates a signature, and writes
                # the signed result to self.signed_file_path.
                sign_app(private_storage.open(self.file_path),
                         self.signed_file_path, signing_ids)
            except SigningError:
                log.info('[ExtensionVersion:%s] Signing failed' % self.pk)
                # Don't leave a partial signed file behind.
                self.remove_public_signed_file()
                raise
        return public_storage.size(self.signed_file_path)
Example No. 2
0
    def sign_file(self):
        """Sign the original file (`file_path`), then move the signed
        extension file to the signed path (`signed_file_path`) on public
        storage. The original file remains on private storage.

        Returns the signed file size.

        Raises SigningError if the extension uuid or version pk is missing,
        if the extension is blocked, or if signing itself fails.
        """
        if not self.extension.uuid:
            raise SigningError('Need uuid to be set to sign')
        if not self.pk:
            raise SigningError('Need version pk to be set to sign')
        if self.extension.is_blocked():
            # Fixed message: previously read "Trying to signed".
            raise SigningError('Trying to sign a blocked extension')

        ids = json.dumps({
            # 'id' needs to be a unique identifier not shared with anything
            # else (other extensions, langpacks, webapps...), but must not
            # change when there is an update.
            'id': self.extension.uuid,
            # 'version' should be an integer and should be monotonically
            # increasing.
            'version': self.pk
        })
        with statsd.timer('extensions.sign'):
            try:
                # This will read the file from self.file_path, generate a
                # signature and write the signed file to self.signed_file_path.
                sign_app(private_storage.open(self.file_path),
                         self.signed_file_path, ids)
            except SigningError:
                log.info('[ExtensionVersion:%s] Signing failed' % self.pk)
                self.remove_public_signed_file()  # Clean up partial output.
                raise
        return public_storage.size(self.signed_file_path)
Example No. 3
0
def get_cached_minifest(app_or_langpack, force=False):
    """Build (and cache) a "mini" manifest for a packaged app or langpack.

    Pass `force=True` to bypass an existing cache entry.

    The platform expects name/developer/locales to match the data in the
    real manifest inside the package, so those fields are read from the zip
    file itself.

    Returns a (minifest_contents, etag) tuple.
    """
    # Bump this prefix whenever the cached payload format changes.
    cache_prefix = 1
    cache_key = '%s:%s:%s:manifest' % (
        cache_prefix, app_or_langpack._meta.model_name, app_or_langpack.pk)

    if not force:
        cached = cache.get(cache_key)
        if cached:
            return cached

    signer = getattr(app_or_langpack, 'sign_if_packaged', None)
    if signer is None:
        # Langpacks are signed at manifest-generation time and expose a
        # file_path attribute directly.
        signed_file_path = app_or_langpack.file_path
    else:
        # sign_if_packaged() returns the signed path, but calling it needs a
        # current version. Without one, return an empty manifest and skip
        # caching so a later version gets picked up correctly.
        if not app_or_langpack.current_version:
            return '{}'
        signed_file_path = signer()

    manifest = app_or_langpack.get_manifest_json()

    data = {
        'size': public_storage.size(signed_file_path),
        'package_path': app_or_langpack.get_package_path(),
    }
    if hasattr(app_or_langpack, 'current_version'):
        version = app_or_langpack.current_version
        data['version'] = version.version
        data['release_notes'] = version.releasenotes
        file_hash = version.all_files[0].hash
    else:
        # LangPacks have no version model: the version number is a plain
        # attribute and there are no release notes.
        data['version'] = app_or_langpack.version
        # No stored file hash for langpacks; file_version changes on every
        # new upload, so use it instead.
        file_hash = unicode(app_or_langpack.file_version)

    for field in ('developer', 'icons', 'locales', 'name'):
        if field in manifest:
            data[field] = manifest[field]

    contents = json.dumps(data, cls=JSONEncoder)
    etag = hashlib.sha256()
    etag.update(contents)
    if file_hash:
        etag.update(file_hash)
    result = (contents, etag.hexdigest())
    cache.set(cache_key, result, None)
    return result
Example No. 4
0
def get_cached_minifest(app_or_langpack, force=False):
    """Return (minifest_json, etag) for a packaged app or langpack.

    The result is cached; pass `force=True` to recompute and overwrite any
    cached entry.

    Note that platform expects name/developer/locales to match the data from
    the real manifest in the package, so those values come from the zip file.
    """
    cache_prefix = 1  # Bump when the cached payload shape changes.
    cache_key = '{0}:{1}:{2}:manifest'.format(cache_prefix,
                                              app_or_langpack._meta.model_name,
                                              app_or_langpack.pk)

    if not force:
        hit = cache.get(cache_key)
        if hit:
            return hit

    sign_if_packaged = getattr(app_or_langpack, 'sign_if_packaged', None)
    if sign_if_packaged is not None:
        # sign_if_packaged() returns the signed path but requires a current
        # version; with none available, hand back an empty manifest without
        # caching so a future version can be picked up.
        if not app_or_langpack.current_version:
            return '{}'
        signed_file_path = sign_if_packaged()
    else:
        # Langpacks are already signed when the manifest is generated and
        # carry a file_path attribute.
        signed_file_path = app_or_langpack.file_path

    manifest = app_or_langpack.get_manifest_json()
    package_path = app_or_langpack.get_package_path()

    data = {
        'size': public_storage.size(signed_file_path),
        'package_path': package_path,
    }
    if hasattr(app_or_langpack, 'current_version'):
        cv = app_or_langpack.current_version
        data['version'] = cv.version
        data['release_notes'] = cv.releasenotes
        file_hash = cv.all_files[0].hash
    else:
        # LangPacks have no version model: version is a plain attribute and
        # they have no release notes.
        data['version'] = app_or_langpack.version
        # No stored file hash for langpacks, but file_version bumps on each
        # upload, so it works as a hash substitute.
        file_hash = unicode(app_or_langpack.file_version)

    for key in ('developer', 'icons', 'locales', 'name'):
        if key in manifest:
            data[key] = manifest[key]

    serialized = json.dumps(data, cls=JSONEncoder)
    digest = hashlib.sha256()
    digest.update(serialized)
    if file_hash:
        digest.update(file_hash)
    minifest = (serialized, digest.hexdigest())
    cache.set(cache_key, minifest, None)
    return minifest