Example #1
File: models.py Project: Witia1/zamboni
 def reviewer_sign_file(self):
     """Sign the original file (`file_path`) with reviewer certs, then move
     the signed file to the reviewers-specific signed path
     (`reviewer_signed_file_path`) on private storage."""
     if not self.extension.uuid:
         raise SigningError('Need uuid to be set to sign')
     if not self.pk:
         raise SigningError('Need version pk to be set to sign')
     ids = json.dumps({
         'id': self.review_id,
         'version': self.pk
     })
     with statsd.timer('extensions.sign_reviewer'):
         try:
             # This will read the file from self.file_path, generate a
             # reviewer signature and write the signed file to
             # self.reviewer_signed_file_path.
             sign_app(private_storage.open(self.file_path),
                      self.reviewer_signed_file_path, ids, reviewer=True)
         except SigningError:
             log.info(
                 '[ExtensionVersion:%s] Reviewer Signing failed' % self.pk)
             if private_storage.exists(self.reviewer_signed_file_path):
                 private_storage.delete(self.reviewer_signed_file_path)
             raise
Example #2
def rmtree(prefix):
    dirs, files = private_storage.listdir(prefix)
    for fname in files:
        private_storage.delete(os.path.join(prefix, fname))
    for d in dirs:
        rmtree(os.path.join(prefix, d))
    private_storage.delete(prefix)
Example #3
File: models.py Project: waseem18/zamboni
    def delete(self):
        log.info(u'Version deleted: %r (%s)' % (self, self.id))
        mkt.log(mkt.LOG.DELETE_VERSION, self.addon, str(self.version))

        models.signals.pre_delete.send(sender=Version, instance=self)

        was_current = False
        if self == self.addon.current_version:
            was_current = True

        self.update(deleted=True)

        # Set file status to disabled.
        f = self.all_files[0]
        f.update(status=mkt.STATUS_DISABLED, _signal=False)
        f.hide_disabled_file()

        # If version deleted was the current version and there now exists
        # another current_version, we need to call some extra methods to update
        # various bits for packaged apps.
        if was_current and self.addon.current_version:
            self.addon.update_name_from_package_manifest()
            self.addon.update_supported_locales()

        if self.addon.is_packaged:
            # Unlink signed packages if packaged app.
            public_storage.delete(f.signed_file_path)
            log.info(u'Unlinked file: %s' % f.signed_file_path)
            private_storage.delete(f.signed_reviewer_file_path)
            log.info(u'Unlinked file: %s' % f.signed_reviewer_file_path)

        models.signals.post_delete.send(sender=Version, instance=self)
Example #4
 def test_delete_mid_read(self):
     self.viewer.extract()
     self.viewer.select('install.js')
     private_storage.delete(os.path.join(self.viewer.dest, 'install.js'))
     res = self.viewer.read_file()
     eq_(res, '')
     assert self.viewer.selected['msg'].startswith('That file no')
Example #5
File: cron.py Project: Fjoerfoks/zamboni
def dump_user_installs_cron():
    """
    Sets up tasks to do user install dumps.
    """
    chunk_size = 100
    # Get valid users to dump.
    user_ids = set(Installed.objects.filter(user__enable_recommendations=True)
                   .values_list('user', flat=True))

    # Clean up the path where we'll store the individual json files from each
    # user installs dump (which are in users/ in DUMPED_USERS_PATH).
    path_to_cleanup = os.path.join(settings.DUMPED_USERS_PATH, 'users')
    task_log.info('Cleaning up path {0}'.format(path_to_cleanup))
    try:
        for dirpath, dirnames, filenames in walk_storage(
                path_to_cleanup, storage=private_storage):
            for filename in filenames:
                private_storage.delete(os.path.join(dirpath, filename))
    except OSError:
        # Ignore if the directory does not exist.
        pass

    grouping = []
    for chunk in chunked(user_ids, chunk_size):
        grouping.append(dump_user_installs.subtask(args=[chunk]))

    post = zip_users.subtask(immutable=True)
    ts = chord(grouping, post)
    ts.apply_async()
Example #6
File: tasks.py Project: ujdhesa/zamboni
def resize_preview(src, pk, **kw):
    """Resizes preview images and stores the sizes on the preview."""
    instance = Preview.objects.get(pk=pk)
    thumb_dst, full_dst = instance.thumbnail_path, instance.image_path
    sizes = instance.sizes or {}
    log.info("[1@None] Resizing preview and storing size: %s" % thumb_dst)
    try:
        thumbnail_size = APP_PREVIEW_SIZES[0][:2]
        image_size = APP_PREVIEW_SIZES[1][:2]
        with private_storage.open(src, "rb") as fp:
            size = Image.open(fp).size
        if size[0] > size[1]:
            # If the image is wider than tall, then reverse the wanted size
            # to keep the original aspect ratio while still resizing to
            # the correct dimensions.
            thumbnail_size = thumbnail_size[::-1]
            image_size = image_size[::-1]

        if kw.get("generate_thumbnail", True):
            sizes["thumbnail"] = resize_image(src, thumb_dst, thumbnail_size, remove_src=False)
        if kw.get("generate_image", True):
            sizes["image"] = resize_image(src, full_dst, image_size, remove_src=False)
        instance.sizes = sizes
        instance.save()
        log.info("Preview resized to: %s" % thumb_dst)

        # Remove src file now that it has been processed.
        private_storage.delete(src)

        return True

    except Exception as e:
        log.error("Error saving preview: %s; %s" % (e, thumb_dst))
Example #7
File: models.py Project: 1Smert1/zamboni
 def reviewer_sign_file(self):
     """Sign the original file (`file_path`) with reviewer certs, then move
     the signed file to the reviewers-specific signed path
     (`reviewer_signed_file_path`) on private storage."""
     if not self.extension.uuid:
         raise SigningError('Need uuid to be set to sign')
     if not self.pk:
         raise SigningError('Need version pk to be set to sign')
     ids = json.dumps({
         # Reviewers get a unique 'id' so the reviewer installed add-on
         # won't conflict with the public add-on, and also so even multiple
         # versions of the same add-on can be installed side by side with
         # other versions.
         'id': 'reviewer-{guid}-{version_id}'.format(
             guid=self.extension.uuid, version_id=self.pk),
         'version': self.pk
     })
     with statsd.timer('extensions.sign_reviewer'):
         try:
             # This will read the file from self.file_path, generate a
             # reviewer signature and write the signed file to
             # self.reviewer_signed_file_path.
             sign_app(private_storage.open(self.file_path),
                      self.reviewer_signed_file_path, ids, reviewer=True)
         except SigningError:
             log.info(
                 '[ExtensionVersion:%s] Reviewer Signing failed' % self.pk)
             if private_storage.exists(self.reviewer_signed_file_path):
                 private_storage.delete(self.reviewer_signed_file_path)
             raise
Example #8
File: models.py Project: shahbaz17/zamboni
    def delete(self):
        log.info(u'Version deleted: %r (%s)' % (self, self.id))
        mkt.log(mkt.LOG.DELETE_VERSION, self.webapp, str(self.version))

        models.signals.pre_delete.send(sender=Version, instance=self)

        was_current = False
        if self == self.webapp.current_version:
            was_current = True

        self.update(deleted=True)

        # Set file status to disabled.
        f = self.all_files[0]
        f.update(status=mkt.STATUS_DISABLED, _signal=False)
        f.hide_disabled_file()

        # If version deleted was the current version and there now exists
        # another current_version, we need to call some extra methods to update
        # various bits for packaged apps.
        if was_current and self.webapp.current_version:
            self.webapp.update_name_from_package_manifest()
            self.webapp.update_supported_locales()

        if self.webapp.is_packaged:
            # Unlink signed packages if packaged app.
            public_storage.delete(f.signed_file_path)
            log.info(u'Unlinked file: %s' % f.signed_file_path)
            private_storage.delete(f.signed_reviewer_file_path)
            log.info(u'Unlinked file: %s' % f.signed_reviewer_file_path)

        models.signals.post_delete.send(sender=Version, instance=self)
Example #9
File: helpers.py Project: waseem18/zamboni
def rmtree(prefix):
    dirs, files = private_storage.listdir(prefix)
    for fname in files:
        private_storage.delete(os.path.join(prefix, fname))
    for d in dirs:
        rmtree(os.path.join(prefix, d))
    private_storage.delete(prefix)
Example #10
File: cron.py Project: Fjoerfoks/zamboni
def cleanup_extracted_file():
    log.info('Removing extracted files for file viewer.')
    root = os.path.join(settings.TMP_PATH, 'file_viewer')
    # Local storage uses local time for file modification. S3 uses UTC time.
    now = datetime.utcnow if storage_is_remote() else datetime.now
    for path in private_storage.listdir(root)[0]:
        full = os.path.join(root, path)
        age = now() - private_storage.modified_time(
            os.path.join(full, 'manifest.webapp'))
        if age.total_seconds() > (60 * 60):
            log.debug('Removing extracted files: %s, %dsecs old.' %
                      (full, age.total_seconds()))
            for subroot, dirs, files in walk_storage(full):
                for f in files:
                    private_storage.delete(os.path.join(subroot, f))
            # Nuke out the file and diff caches when the file gets removed.
            id = os.path.basename(path)
            try:
                int(id)
            except ValueError:
                continue

            key = hashlib.md5()
            key.update(str(id))
            cache.delete('%s:memoize:%s:%s' % (settings.CACHE_PREFIX,
                                               'file-viewer', key.hexdigest()))
Example #11
 def test_bom(self):
     dest = os.path.join(settings.TMP_PATH, 'test_bom')
     with private_storage.open(dest, 'w') as f:
         f.write('foo'.encode('utf-16'))
     self.viewer.select('foo')
     self.viewer.selected = {'full': dest, 'size': 1}
     eq_(self.viewer.read_file(), u'foo')
     private_storage.delete(dest)
Example #12
 def test_view_one_missing(self):
     self.file_viewer.extract()
     private_storage.delete(
         os.path.join(self.file_viewer.right.dest, 'script.js'))
     res = self.client.get(self.file_url(not_binary))
     doc = pq(res.content)
     eq_(len(doc('pre')), 3)
     eq_(len(doc('#content-wrapper p')), 2)
Example #13
 def test_different_tree(self):
     self.file_viewer.extract()
     private_storage.delete(
         os.path.join(self.file_viewer.left.dest, not_binary))
     res = self.client.get(self.file_url(not_binary))
     doc = pq(res.content)
     eq_(doc('h4:last').text(), 'Deleted files:')
     eq_(len(doc('ul.root')), 2)
Example #14
 def test_view_one_missing(self):
     self.file_viewer.extract()
     private_storage.delete(os.path.join(self.file_viewer.right.dest,
                                         'script.js'))
     res = self.client.get(self.file_url(not_binary))
     doc = pq(res.content)
     eq_(len(doc('pre')), 3)
     eq_(len(doc('#content-wrapper p')), 2)
Example #15
 def test_different_tree(self):
     self.file_viewer.extract()
     private_storage.delete(os.path.join(self.file_viewer.left.dest,
                                         not_binary))
     res = self.client.get(self.file_url(not_binary))
     doc = pq(res.content)
     eq_(doc('h4:last').text(), 'Deleted files:')
     eq_(len(doc('ul.root')), 2)
Example #16
    def setup_files(self):
        # Clean out any left over stuff.
        private_storage.delete(self.file.signed_file_path)
        private_storage.delete(self.file.signed_reviewer_file_path)

        # Make sure the source file is there.
        if not private_storage.exists(self.file.file_path):
            copy_to_storage(self.packaged_app_path('mozball.zip'),
                            self.file.file_path)
Example #17
 def cleanup(self):
     try:
         for root, dirs, files in walk_storage(
                 self.dest, storage=private_storage):
             for fname in files:
                 private_storage.delete(os.path.join(root, fname))
     except OSError as e:
         if e.errno == 2:
             # Directory doesn't exist, nothing to clean up.
             return
         raise
Example #18
File: helpers.py Project: waseem18/zamboni
 def cleanup(self):
     try:
         for root, dirs, files in walk_storage(self.dest,
                                               storage=private_storage):
             for fname in files:
                 private_storage.delete(os.path.join(root, fname))
     except OSError as e:
         if e.errno == 2:
             # Directory doesn't exist, nothing to clean up.
             return
         raise
Example #19
File: fakedata.py Project: jostw/zamboni
def generate_previews(app, n=1):
    gen = pydenticon.Generator(8, 12, foreground=foreground, digest=hashlib.sha512)
    for i in range(n):
        img = gen.generate(unicode(app.name) + unichr(i), 320, 480, output_format="png")
        p = Preview.objects.create(addon=app, filetype="image/png", caption="screenshot " + str(i), position=i)
        fn = tempfile.mktemp()
        try:
            f = private_storage.open(fn, "w")
            f.write(img)
            f.close()
            resize_preview(fn, p.pk)
        finally:
            private_storage.delete(fn)
Example #20
def clean_old_signed(seconds=60 * 60):
    """Clean out apps signed for reviewers."""
    log.info('Removing old apps signed for reviewers')
    root = settings.SIGNED_APPS_REVIEWER_PATH
    # Local storage uses local time for file modification. S3 uses UTC time.
    now = datetime.utcnow if storage_is_remote() else datetime.now
    for nextroot, dirs, files in walk_storage(root):
        for fn in files:
            full = os.path.join(nextroot, fn)
            age = now() - private_storage.modified_time(full)
            if age.total_seconds() > seconds:
                log.debug('Removing signed app: %s, %dsecs old.' % (
                    full, age.total_seconds()))
                private_storage.delete(full)
Example #21
 def test_delete_with_file(self):
     """Test that when a Extension instance is deleted, the corresponding
     file on the filesystem is also deleted."""
     extension = Extension.objects.create(version='0.1')
     file_path = extension.file_path
     with private_storage.open(file_path, 'w') as f:
         f.write('sample data\n')
     assert private_storage.exists(file_path)
     try:
         extension.delete()
         assert not private_storage.exists(file_path)
     finally:
         if private_storage.exists(file_path):
             private_storage.delete(file_path)
Example #22
File: cron.py Project: Fjoerfoks/zamboni
def mkt_gc(**kw):
    """Site-wide garbage collections."""
    log.info('Collecting data to delete')
    logs = (ActivityLog.objects.filter(created__lt=days_ago(90))
            .exclude(action__in=mkt.LOG_KEEP).values_list('id', flat=True))

    for chunk in chunked(logs, 100):
        chunk.sort()
        log.info('Deleting log entries: %s' % str(chunk))
        delete_logs.delay(chunk)

    # Clear oauth nonce rows. These expire after 10 minutes but we're just
    # clearing those that are more than 1 day old.
    Nonce.objects.filter(created__lt=days_ago(1)).delete()

    # Delete the dump apps over 30 days.
    _remove_stale_files(os.path.join(settings.DUMPED_APPS_PATH, 'tarballs'),
                        settings.DUMPED_APPS_DAYS_DELETE,
                        'Deleting old tarball: {0}',
                        storage=public_storage)

    # Delete the dumped user installs over 30 days. Those are using private
    # storage.
    _remove_stale_files(os.path.join(settings.DUMPED_USERS_PATH, 'tarballs'),
                        settings.DUMPED_USERS_DAYS_DELETE,
                        'Deleting old tarball: {0}',
                        storage=private_storage)

    # Delete old files in select directories under TMP_PATH.
    _remove_stale_files(os.path.join(settings.TMP_PATH, 'preview'),
                        settings.TMP_PATH_DAYS_DELETE,
                        'Deleting TMP_PATH file: {0}',
                        storage=private_storage)
    _remove_stale_files(os.path.join(settings.TMP_PATH, 'icon'),
                        settings.TMP_PATH_DAYS_DELETE,
                        'Deleting TMP_PATH file: {0}',
                        storage=private_storage)

    # Delete stale FileUploads.
    for fu in FileUpload.objects.filter(created__lte=days_ago(90)):
        log.debug(u'[FileUpload:{uuid}] Removing file: {path}'
                  .format(uuid=fu.uuid, path=fu.path))
        if fu.path:
            try:
                private_storage.delete(fu.path)
            except OSError:
                pass
        fu.delete()
Example #23
def export_data(name=None):
    today = datetime.datetime.today().strftime('%Y-%m-%d')
    if name is None:
        name = today

    # Clean up the path where we'll store the individual json files from each
    # app dump.
    for dirpath, dirnames, filenames in walk_storage(
            settings.DUMPED_APPS_PATH, storage=private_storage):
        for filename in filenames:
            private_storage.delete(os.path.join(dirpath, filename))
    task_log.info('Cleaning up path {0}'.format(settings.DUMPED_APPS_PATH))

    # Run all dump_apps task in parallel, and once it's done, add extra files
    # and run compression.
    chord(dump_all_apps_tasks(),
          compress_export.si(tarball_name=name, date=today)).apply_async()
Example #24
def cleanup_file(sender, instance, **kw):
    """ On delete of the file object from the database, unlink the file from
    the file system """
    if kw.get('raw') or not instance.filename:
        return
    # Use getattr so the paths are accessed inside the try block.
    for path in ('file_path', 'guarded_file_path', 'signed_file_path'):
        try:
            filename = getattr(instance, path, None)
        except models.ObjectDoesNotExist:
            return
        if filename and (public_storage.exists(filename) or
                         private_storage.exists(filename)):
            log.info('Removing filename: %s for file: %s'
                     % (filename, instance.pk))
            public_storage.delete(filename)
            private_storage.delete(filename)
Example #25
def generate_previews(app, n=1):
    gen = pydenticon.Generator(8, 12, foreground=foreground,
                               digest=hashlib.sha512)
    for i in range(n):
        img = gen.generate(unicode(app.name) + unichr(i), 320, 480,
                           output_format="png")
        p = Preview.objects.create(addon=app, filetype="image/png",
                                   caption="screenshot " + str(i),
                                   position=i)
        fn = tempfile.mktemp()
        try:
            f = private_storage.open(fn, 'w')
            f.write(img)
            f.close()
            resize_preview(fn, p.pk)
        finally:
            private_storage.delete(fn)
Example #26
def export_data(name=None):
    today = datetime.datetime.today().strftime('%Y-%m-%d')
    if name is None:
        name = today

    # Clean up the path where we'll store the individual json files from each
    # app dump.
    for dirpath, dirnames, filenames in walk_storage(settings.DUMPED_APPS_PATH,
                                                     storage=private_storage):
        for filename in filenames:
            private_storage.delete(os.path.join(dirpath, filename))
    task_log.info('Cleaning up path {0}'.format(settings.DUMPED_APPS_PATH))

    # Run all dump_apps task in parallel, and once it's done, add extra files
    # and run compression.
    chord(dump_all_apps_tasks(),
          compress_export.si(tarball_name=name, date=today)).apply_async()
Example #27
File: tasks.py Project: kolyaflash/zamboni
def resize_promo_imgs(src, dst, sizes, **kw):
    """Resizes webapp/website promo imgs."""
    log.info('[1@None] Resizing promo imgs: %s' % dst)
    try:
        for s in sizes:
            size_dst = '%s-%s.png' % (dst, s)
            # Crop only to the width, keeping the aspect ratio.
            resize_image(src, size_dst, (s, 0), remove_src=False)
            pngcrush_image.delay(size_dst, **kw)

        with private_storage.open(src) as fd:
            promo_img_hash = _hash_file(fd)
        private_storage.delete(src)

        log.info('Promo img hash resizing completed for: %s' % dst)
        return {'promo_img_hash': promo_img_hash}
    except Exception as e:
        log.error("Error resizing promo img hash: %s; %s" % (e, dst))
Example #28
 def test_delete_with_file(self):
     """Test that when a Extension instance is deleted, the ExtensionVersion
     referencing it are also deleted, as well as the attached files."""
     extension = Extension.objects.create()
     version = ExtensionVersion.objects.create(
         extension=extension, version='0.1')
     file_path = version.file_path
     with private_storage.open(file_path, 'w') as f:
         f.write('sample data\n')
     assert private_storage.exists(file_path)
     try:
         extension.delete()
         assert not Extension.objects.count()
         assert not ExtensionVersion.objects.count()
         assert not private_storage.exists(file_path)
     finally:
         if private_storage.exists(file_path):
             private_storage.delete(file_path)
Example #29
File: models.py Project: wangeek/zamboni
def cleanup_file(sender, instance, **kw):
    """ On delete of the file object from the database, unlink the file from
    the file system """
    if kw.get('raw') or not instance.filename:
        return
    # Use getattr so the paths are accessed inside the try block.
    for path in ('file_path', 'guarded_file_path', 'reviewer_signed_file_path',
                 'signed_file_path'):
        try:
            filename = getattr(instance, path, None)
        except models.ObjectDoesNotExist:
            return
        if filename and (public_storage.exists(filename) or
                         private_storage.exists(filename)):
            log.info('Removing filename: %s for file: %s'
                     % (filename, instance.pk))
            public_storage.delete(filename)
            private_storage.delete(filename)
Example #30
File: models.py Project: shahbaz17/zamboni
    def from_upload(cls, upload, webapp, send_signal=True):
        data = utils.parse_webapp(upload, webapp)
        max_len = cls._meta.get_field_by_name('_developer_name')[0].max_length
        developer = data.get('developer_name', '')[:max_len]
        v = cls.objects.create(webapp=webapp, version=data['version'],
                               _developer_name=developer)
        log.info('New version: %r (%s) from %r' % (v, v.id, upload))

        # To avoid circular import.
        from mkt.webapps.models import AppManifest

        # Note: This must happen before we call `File.from_upload`.
        manifest = utils.WebAppParser().get_json_data(upload)
        AppManifest.objects.create(
            version=v, manifest=json.dumps(manifest))

        File.from_upload(upload, v, parse_data=data)

        # Update supported locales from manifest.
        # Note: This needs to happen after we call `File.from_upload`.
        update_supported_locales_single.apply_async(
            args=[webapp.id], kwargs={'latest': True},
            eta=datetime.datetime.now() +
            datetime.timedelta(seconds=settings.NFS_LAG_DELAY)
        )

        v.disable_old_files()
        # After the upload has been copied, remove the upload.
        private_storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=v)

        # If packaged app and app is blocked, put in escalation queue.
        if webapp.is_packaged and webapp.status == mkt.STATUS_BLOCKED:
            # To avoid circular import.
            from mkt.reviewers.models import EscalationQueue
            EscalationQueue.objects.create(webapp=webapp)

        return v
Example #31
File: tasks.py Project: mrheides/zamboni
def export_data(name=None):
    today = datetime.datetime.today().strftime('%Y-%m-%d')
    if name is None:
        name = today

    # Clean up the path where we'll store the individual json files from each
    # app dump (which are in apps/ inside DUMPED_APPS_PATH).
    path_to_cleanup = os.path.join(settings.DUMPED_APPS_PATH, 'apps')
    task_log.info('Cleaning up path {0}'.format(path_to_cleanup))
    try:
        for dirpath, dirnames, filenames in walk_storage(
                path_to_cleanup, storage=private_storage):
            for filename in filenames:
                private_storage.delete(os.path.join(dirpath, filename))
    except OSError:
        # Ignore if the directory does not exist.
        pass

    # Run all dump_apps task in parallel, and once it's done, add extra files
    # and run compression.
    chord(dump_all_apps_tasks(),
          compress_export.si(tarball_name=name, date=today)).apply_async()
Example #32
File: models.py Project: waseem18/zamboni
    def from_upload(cls, upload, addon, send_signal=True):
        data = utils.parse_addon(upload, addon)
        max_len = cls._meta.get_field_by_name('_developer_name')[0].max_length
        developer = data.get('developer_name', '')[:max_len]
        v = cls.objects.create(addon=addon, version=data['version'],
                               _developer_name=developer)
        log.info('New version: %r (%s) from %r' % (v, v.id, upload))

        # To avoid circular import.
        from mkt.webapps.models import AppManifest

        # Note: This must happen before we call `File.from_upload`.
        manifest = utils.WebAppParser().get_json_data(upload)
        AppManifest.objects.create(
            version=v, manifest=json.dumps(manifest))

        File.from_upload(upload, v, parse_data=data)

        # Update supported locales from manifest.
        # Note: This needs to happen after we call `File.from_upload`.
        update_supported_locales_single.apply_async(
            args=[addon.id], kwargs={'latest': True},
            eta=datetime.datetime.now() +
            datetime.timedelta(seconds=settings.NFS_LAG_DELAY)
        )

        v.disable_old_files()
        # After the upload has been copied, remove the upload.
        private_storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=v)

        # If packaged app and app is blocked, put in escalation queue.
        if addon.is_packaged and addon.status == mkt.STATUS_BLOCKED:
            # To avoid circular import.
            from mkt.reviewers.models import EscalationQueue
            EscalationQueue.objects.create(addon=addon)

        return v
Example #33
 def tearDown(self):
     self.helper.cleanup()
     if storage_is_remote():
         private_storage.delete(self.packaged_app_path('signed.zip'))
Example #34
 def tearDown(self):
     private_storage.delete(self.tmp_good)
     private_storage.delete(self.tmp_bad)
     super(TestTask, self).tearDown()
Example #35
 def test_diffable_deleted_files(self):
     self.helper.extract()
     private_storage.delete(os.path.join(self.helper.left.dest,
                                         'index.html'))
     eq_('index.html' in self.helper.get_deleted_files(), True)
Example #36
File: tests.py Project: Joergen/zamboni
 def tearDown(self):
     private_storage.delete(self.tmp_good)
     private_storage.delete(self.tmp_bad)
     super(TestTask, self).tearDown()
Example #37
 def test_diffable_one_missing(self):
     self.helper.extract()
     private_storage.delete(os.path.join(self.helper.right.dest,
                                         'index.html'))
     self.helper.select('index.html')
     assert self.helper.is_diffable()
Example #38
 def tearDown(self):
     for tmp in self.tmp_files:
         private_storage.delete(tmp)
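A pattern common to most of the examples above is defensive deletion: guard private_storage.delete() with an exists() check, or tolerate the OSError that a missing path can raise, so cleanup code never fails on files that are already gone. The sketch below is a distillation of that idiom, not zamboni code: it assumes only a Django-style storage backend (a plain FileSystemStorage stands in for the project's private_storage, and the helper name safe_delete is illustrative).

import errno
import os

from django.conf import settings
from django.core.files.storage import FileSystemStorage

# Minimal settings so the snippet can run outside a full Django project.
if not settings.configured:
    settings.configure()

# Stand-in for the `private_storage` object used throughout the examples;
# any storage backend exposing exists()/delete() behaves the same way.
private_storage = FileSystemStorage(location='/tmp/private-media')


def safe_delete(storage, path):
    """Delete `path` from `storage`, tolerating a path that is already gone."""
    try:
        if storage.exists(path):
            storage.delete(path)
    except OSError as exc:
        # A concurrent cleanup may remove the file between exists() and
        # delete(); only re-raise errors other than "no such file".
        if exc.errno != errno.ENOENT:
            raise


# Usage, mirroring the cleanup calls in the examples (path is illustrative):
safe_delete(private_storage, os.path.join('file_viewer', 'install.js'))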