def setup_files(self):
    """Reset signed artifacts and make sure the source package is in storage."""
    # Drop any signed copies left over from a previous run.
    for stale_path in (self.file.signed_file_path,
                       self.file.signed_reviewer_file_path):
        private_storage.delete(stale_path)
    # Seed the source package if it is not already present in storage.
    if not private_storage.exists(self.file.file_path):
        copy_to_storage(self.packaged_app_path('mozball.zip'),
                        self.file.file_path)
def compress_export(tarball_name, date):
    """Build a .tgz dump of the apps data and copy it to public storage.

    Stages everything from private storage into a local temporary
    directory (``tar`` needs real local files), archives it together with
    the extra files produced by ``compile_extra_files``, then uploads the
    archive to public storage.

    Returns the remote filename of the tarball.
    Raises subprocess.CalledProcessError if ``tar`` fails.
    """
    # We need a temporary directory on the local filesystem that will
    # contain all files in order to call `tar`.
    local_source_dir = tempfile.mkdtemp()
    apps_dirpath = os.path.join(settings.DUMPED_APPS_PATH, 'apps')

    # In case there is no 'apps' directory, add a dummy file to make the
    # apps directory in the tar archive non-empty. It should not happen in
    # prod, but it's nice to have it to prevent the task from failing
    # entirely.
    with private_storage.open(
            os.path.join(apps_dirpath, '0', '.keep'), 'w') as fd:
        fd.write('.')

    try:
        # Copy content from private_storage to that temp directory. We
        # don't need to worry about creating the directories locally, the
        # storage class does that for us.
        for dirpath, dirnames, filenames in walk_storage(
                apps_dirpath, storage=private_storage):
            for filename in filenames:
                src_path = os.path.join(dirpath, filename)
                dst_path = os.path.join(
                    local_source_dir, 'apps', os.path.basename(dirpath),
                    filename)
                copy_to_storage(
                    src_path, dst_path, src_storage=private_storage,
                    dst_storage=local_storage)

        # Also add extra files to the temp directory.
        extra_filenames = compile_extra_files(local_source_dir, date)

        # All our files are now present locally; generate a local filename
        # that will contain the final '.tar.gz' before it's copied over to
        # public storage.
        local_target_file = tempfile.NamedTemporaryFile(
            suffix='.tgz', prefix='dumped-apps-')
        try:
            # tar ALL the things!
            cmd = (['tar', 'czf', local_target_file.name, '-C',
                    local_source_dir] + ['apps'] + extra_filenames)
            task_log.info(u'Creating dump {0}'.format(local_target_file.name))
            # check_call (instead of call) raises if tar fails, so we never
            # silently publish a missing or corrupt archive.
            subprocess.check_call(cmd)

            # Now copy the local tgz to the public storage.
            remote_target_filename = os.path.join(
                settings.DUMPED_APPS_PATH, 'tarballs',
                '%s.tgz' % tarball_name)
            copy_to_storage(local_target_file.name, remote_target_filename,
                            dst_storage=public_storage)
        finally:
            # Closing the NamedTemporaryFile deletes it.
            local_target_file.close()
    finally:
        # Always remove the local staging directory, even on failure.
        rm_directory(local_source_dir)

    return remote_target_filename
def compress_export(tarball_name, date):
    """Create a .tgz dump of the apps data and push it to public storage.

    Returns the remote filename of the generated tarball.
    """
    # `tar` needs real local files, so stage everything in a scratch
    # directory on the local filesystem first.
    staging_dir = tempfile.mkdtemp()
    apps_dirpath = os.path.join(settings.DUMPED_APPS_PATH, 'apps')

    # Write a placeholder so the 'apps' directory inside the archive is
    # never empty. Should not happen in prod, but it keeps the task from
    # failing entirely.
    keep_path = os.path.join(apps_dirpath, '0', '.keep')
    with private_storage.open(keep_path, 'w') as fd:
        fd.write('.')

    # Mirror everything under apps/ from private storage into the staging
    # directory; the storage class creates missing directories for us.
    for dirpath, dirnames, filenames in walk_storage(
            apps_dirpath, storage=private_storage):
        leaf = os.path.basename(dirpath)
        for filename in filenames:
            copy_to_storage(
                os.path.join(dirpath, filename),
                os.path.join(staging_dir, 'apps', leaf, filename),
                src_storage=private_storage, dst_storage=local_storage)

    # Drop the extra (non-app) files into the staging directory as well.
    extra_filenames = compile_extra_files(staging_dir, date)

    # Local scratch file that will hold the finished '.tar.gz' before it
    # is copied over to public storage.
    archive = tempfile.NamedTemporaryFile(
        suffix='.tgz', prefix='dumped-apps-')

    # tar ALL the things!
    cmd = ['tar', 'czf', archive.name, '-C', staging_dir, 'apps']
    cmd += extra_filenames
    task_log.info(u'Creating dump {0}'.format(archive.name))
    subprocess.call(cmd)

    # Publish the archive to public storage.
    remote_target_filename = os.path.join(
        settings.DUMPED_APPS_PATH, 'tarballs', '%s.tgz' % tarball_name)
    copy_to_storage(archive.name, remote_target_filename,
                    dst_storage=public_storage)

    # Clean-up.
    archive.close()
    rm_directory(staging_dir)
    return remote_target_filename
def test_resize_transparency():
    """Resizing a transparent PNG must reproduce the expected reference image."""
    src = get_image_path('transparent.png')
    dest = tempfile.mkstemp(dir=settings.TMP_PATH)[1]
    expected = src.replace('.png', '-expected.png')
    # Remote storage backends need the source uploaded first.
    if storage_is_remote():
        copy_to_storage(src, src, src_storage=local_storage)
    try:
        resize_image(src, dest, (32, 32), remove_src=False)
        with public_storage.open(dest) as resized:
            with open(expected) as reference:
                assert resized.read() == reference.read()
    finally:
        # Remove the resized output regardless of the test outcome.
        if public_storage.exists(dest):
            public_storage.delete(dest)
def setup_files(self, filename='mozball.zip'):
    """Copy the packaged test app into both the plain and signed storage paths."""
    local_path = self.packaged_app_path(filename)
    # Local source filename must exist.
    assert os.path.exists(local_path)
    # Remote filename must not be empty.
    assert self.file.filename
    # Original packaged file, then the signed packaged file.
    for target_path in (self.file.file_path, self.file.signed_file_path):
        copy_to_storage(local_path, target_path)
def test_admin_can_blocklist(self):
    """An admin with Apps:Configure can blocklist an app via the dev URL."""
    blocklist_zip_path = os.path.join(
        settings.MEDIA_ROOT, 'packaged-apps', 'blocklisted.zip')
    # Remote storage backends need the blocklist package uploaded first.
    if storage_is_remote():
        copy_to_storage(blocklist_zip_path, blocklist_zip_path)
    admin = UserProfile.objects.get(email='*****@*****.**')
    self.grant_permission(admin, 'Apps:Configure')
    self.login('*****@*****.**')
    versions_before = self.app.versions.count()
    res = self.client.post(self.app.get_dev_url('blocklist'))
    self.assert3xx(res, self.app.get_dev_url('versions'))
    # Blocklisting adds a new (blocked) version and flips the app status.
    app = self.app.reload()
    eq_(app.versions.count(), versions_before + 1)
    eq_(app.status, mkt.STATUS_BLOCKED)
    eq_(app.versions.latest().files.latest().status, mkt.STATUS_BLOCKED)
def upload(self, name, **kwargs):
    """Create a FileUpload record for the packaged-app fixture `name`.

    Ensures `name` has a packaged-app extension, verifies the fixture
    exists locally, copies it into private storage if needed, and returns
    the created FileUpload. Extra keyword arguments override the default
    creation data.

    Raises ValueError if the fixture file does not exist locally.
    """
    if os.path.splitext(name)[-1] not in ['.webapp', '.zip']:
        name = name + '.zip'

    # Canned validation result: no errors, some warnings/notices.
    v = json.dumps(dict(errors=0, warnings=1, notices=2, metadata={}))
    fname = nfd_str(self.packaged_app_path(name))
    if not local_storage.exists(fname):
        # Bug fix: the message was passed logging-style as
        # ('... %s ...', fname), which never interpolates in an exception;
        # format it explicitly so the missing path shows up in the error.
        raise ValueError('The file %s does not exist :(' % fname)
    if not private_storage.exists(fname):
        copy_to_storage(fname)

    data = {
        'path': fname,
        'name': name,
        'hash': 'sha256:%s' % name,
        'validation': v,
    }
    data.update(**kwargs)
    return FileUpload.objects.create(**data)