def compile_extra_files(target_directory, date):
    """Render license.txt and readme.txt into `target_directory`.

    This is done locally only; it's only useful before the tar command is
    run. Returns the list of filenames that were created.
    """
    context = Context({'date': date, 'url': settings.SITE_URL})
    files = ['license.txt', 'readme.txt']
    created_files = []
    for filename in files:
        template = loader.get_template('webapps/dump/apps/%s' % filename)
        dest = os.path.join(target_directory, filename)
        # Use a context manager so the handle is closed (and the content
        # flushed to disk) before the caller tars the directory -- the
        # original leaked the file object returned by open().
        with local_storage.open(dest, 'w') as fd:
            fd.write(template.render(context))
        created_files.append(filename)
    return created_files
def test_too_small_not_ok(self):
    # A 72x72 icon is below the minimum promo image size, so the form
    # must reject it.
    with local_storage.open(get_image_path('mkt_icon_72.png')) as fobj:
        upload = SimpleUploadedFile('mkt_icon_72.png', fobj.read(),
                                    content_type='image/png')
        form = PromoImgForm({}, {'promo_img': upload})
        ok_(not form.is_valid())
def test_animated_not_ok(self):
    # Animated images are not allowed as promo images.
    with local_storage.open(get_image_path('animated.gif')) as fobj:
        upload = SimpleUploadedFile('animated.gif', fobj.read(),
                                    content_type='image/gif')
        form = PromoImgForm({}, {'promo_img': upload})
        ok_(not form.is_valid())
def handle(self, *args, **kw): if len(args) < 1: sys.stdout.write('Pass repo name as arg (e.g., fireplace).\n') return repo = args[0] repo_build_id = DeployBuildId.objects.get_or_create(repo=repo)[0] old_build_id = repo_build_id.build_id if len(args) > 1: # Read the build ID from the second argument. repo_build_id.build_id = str(args[1]) else: # Read the build ID from build_id.txt in the repository's root. build_id_path = os.path.join( settings.MEDIA_ROOT, repo, 'build_id.txt') with local_storage.open(build_id_path) as f: repo_build_id.build_id = f.read() # Save it. repo_build_id.save() print "Successfully changed %s's build_id from %s to %s in db\n" % ( repo, old_build_id, repo_build_id.build_id )
def test_icon_too_small(self):
    # A 72px icon is under the required size: expect validation errors,
    # but the upload should still land in the temporary location.
    with local_storage.open(get_image_path('mkt_icon_72.png')) as fobj:
        errors, upload_hash = check_upload(fobj, 'icon', 'image/png')
        ok_(errors)
        ok_(upload_hash)
        tmp_img_path = os.path.join(settings.TMP_PATH, 'icon', upload_hash)
        ok_(private_storage.exists(tmp_img_path))
def test_promo_img_too_small(self):
    """An image below the minimum promo size yields errors but is still
    written to the temporary upload location."""
    with local_storage.open(get_image_path('preview.jpg')) as f:
        errors, upload_hash = check_upload(f, 'promo_img', 'image/png')
        ok_(errors)
        ok_(upload_hash)
        tmp_img_path = os.path.join(settings.TMP_PATH, 'promo_img',
                                    upload_hash)
        # Consistency fix: sibling tests verify the temp upload through
        # private_storage.exists(); os.path.isfile() only works when the
        # private storage happens to be the local filesystem.
        ok_(private_storage.exists(tmp_img_path))
def test_promo_img_ok(self):
    # A sufficiently large image passes validation and is stored in the
    # temporary upload location.
    with local_storage.open(get_image_path('game_1050.jpg')) as fobj:
        errors, upload_hash = check_upload(fobj, 'promo_img', 'image/png')
        ok_(not errors)
        ok_(upload_hash)
        tmp_img_path = os.path.join(settings.TMP_PATH, 'promo_img',
                                    upload_hash)
        ok_(private_storage.exists(tmp_img_path))
def test_preview_too_small(self):
    # A 72px image is too small for a preview: validation errors are
    # expected, but the upload is still stored temporarily.
    with local_storage.open(get_image_path('mkt_icon_72.png')) as fobj:
        errors, upload_hash = check_upload(fobj, 'preview', 'image/png')
        ok_(errors)
        ok_(upload_hash)
        tmp_img_path = os.path.join(settings.TMP_PATH, 'preview',
                                    upload_hash)
        ok_(private_storage.exists(tmp_img_path))
def test_icon_ok(self):
    """A valid icon passes validation and is written to the temporary
    upload location."""
    with local_storage.open(get_image_path('mozilla-sq.png')) as f:
        errors, upload_hash = check_upload(f, 'icon', 'image/png')
        ok_(not errors)
        ok_(upload_hash)
        tmp_img_path = os.path.join(settings.TMP_PATH, 'icon', upload_hash)
        # Consistency fix: sibling tests verify the temp upload through
        # private_storage.exists(); os.path.isfile() only works when the
        # private storage happens to be the local filesystem.
        ok_(private_storage.exists(tmp_img_path))
def test_ok(self):
    # A large enough image validates, and the form can save it onto an app.
    app = mkt.site.tests.app_factory()
    with local_storage.open(get_image_path('game_1050.jpg')) as fobj:
        upload = SimpleUploadedFile('game_1050.jpg', fobj.read(),
                                    content_type='image/jpg')
        form = PromoImgForm({}, {'promo_img': upload})
        ok_(form.is_valid())
        form.save(app)
def get_build_id(repo):
    """Return the build ID for `repo`.

    Prefers the value stored in the database, then the repo's
    build_id.txt, and finally falls back to the literal 'dev'.
    """
    try:
        # Get the build ID from the database (bug 1083185).
        return DeployBuildId.objects.get(repo=repo).build_id
    except DeployBuildId.DoesNotExist:
        # If we haven't initialized a build ID yet, read it directly from
        # the build_id.txt by our frontend builds.
        try:
            build_id_path = os.path.join(settings.MEDIA_ROOT, repo,
                                         'build_id.txt')
            with local_storage.open(build_id_path) as f:
                return f.read()
        except Exception:
            # Deliberately best-effort: a missing/unreadable file means we
            # serve 'dev'. Narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt still propagate.
            return 'dev'
def package_signer(): destination = getattr(settings, 'SIGNED_APPS_SERVER', None) if not destination: return '', 'Signer is not configured.' app_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'nagios_check_packaged_app.zip') signed_path = tempfile.mktemp() try: packaged.sign_app(local_storage.open(app_path), signed_path, None, False, local=True) return '', 'Package signer working' except PackageSigningError, e: msg = 'Error on package signing (%s): %s' % (destination, e) return msg, msg
def __iter__(self):
    """Yield the file at self.path in 4KB chunks.

    Returns an empty iterator when XSENDFILE is on (the web server serves
    the file itself), the server's wsgi.file_wrapper when available, or a
    plain chunking generator otherwise.
    """
    if settings.XSENDFILE:
        return iter([])
    chunk = 4096
    fp = local_storage.open(self.path, 'rb')
    if 'wsgi.file_wrapper' in self.request.META:
        # The WSGI server streams (and is responsible for closing) the
        # file object itself, often via sendfile().
        return self.request.META['wsgi.file_wrapper'](fp, chunk)
    else:
        self['Content-Length'] = local_storage.size(self.path)

        def wrapper():
            # Close the handle when streaming completes or the generator
            # is discarded -- the original leaked it.
            try:
                while 1:
                    data = fp.read(chunk)
                    if not data:
                        break
                    yield data
            finally:
                fp.close()
        return wrapper()
def handle(self, *args, **kw): if len(args) < 1: sys.stdout.write('Pass repo name as arg (e.g., fireplace).\n') return repo = args[0] repo_build_id = DeployBuildId.objects.get_or_create(repo=repo)[0] old_build_id = repo_build_id.build_id if len(args) > 1: # Read the build ID from the second argument. repo_build_id.build_id = str(args[1]) else: # Read the build ID from build_id.txt in the repository's root. build_id_path = os.path.join(settings.MEDIA_ROOT, repo, 'build_id.txt') with local_storage.open(build_id_path) as f: repo_build_id.build_id = f.read() # Save it. repo_build_id.save() print "Successfully changed %s's build_id from %s to %s in db\n" % ( repo, old_build_id, repo_build_id.build_id)
def zip_users(*args, **kw):
    """Dump all user files from private storage into a dated .tgz tarball.

    Copies the dumped user files to a local temp directory, adds license
    and readme files, tars everything, uploads the tarball back to private
    storage, and returns the remote tarball path.
    """
    date = datetime.datetime.utcnow().strftime('%Y-%m-%d')
    tarball_name = date
    # We need a temporary directory on the local filesystem that will
    # contain all files in order to call `tar`.
    local_source_dir = tempfile.mkdtemp()
    users_dirpath = os.path.join(settings.DUMPED_USERS_PATH, 'users')
    # In case users_dirpath is empty, add a dummy file to make the users
    # directory in the tar archive non-empty. It should not happen in prod,
    # but it's nice to have it to prevent the task from failing entirely.
    with private_storage.open(
            os.path.join(users_dirpath, '0', '.keep'), 'w') as fd:
        fd.write('.')
    # Now, copy content from private_storage to that temp directory. We
    # don't need to worry about creating the directories locally, the
    # storage class does that for us.
    for dirpath, dirnames, filenames in walk_storage(
            users_dirpath, storage=private_storage):
        for filename in filenames:
            src_path = os.path.join(dirpath, filename)
            dst_path = os.path.join(local_source_dir, 'users',
                                    os.path.basename(dirpath), filename)
            copy_stored_file(src_path, dst_path,
                             src_storage=private_storage,
                             dst_storage=local_storage)
    # Put some .txt files in place locally.
    context = Context({'date': date, 'url': settings.SITE_URL})
    extra_filenames = ['license.txt', 'readme.txt']
    for extra_filename in extra_filenames:
        template = loader.get_template('webapps/dump/users/' +
                                       extra_filename)
        dst = os.path.join(local_source_dir, extra_filename)
        with local_storage.open(dst, 'w') as fd:
            fd.write(template.render(context))
    # All our files are now present locally, let's generate a local
    # filename that will contain the final '.tar.gz' before it's copied
    # over to public storage.
    local_target_file = tempfile.NamedTemporaryFile(
        suffix='.tgz', prefix='dumped-users-')
    # tar ALL the things!
    cmd = (['tar', 'czf', local_target_file.name, '-C', local_source_dir] +
           ['users'] + extra_filenames)
    task_log.info(u'Creating user dump {0}'.format(local_target_file.name))
    # check_call instead of call: a failed tar previously went unnoticed,
    # silently uploading a truncated/empty tarball.
    subprocess.check_call(cmd)
    # Now copy the local tgz to the public storage.
    remote_target_filename = os.path.join(settings.DUMPED_USERS_PATH,
                                          'tarballs',
                                          '%s.tgz' % tarball_name)
    copy_stored_file(local_target_file.name, remote_target_filename,
                     src_storage=local_storage,
                     dst_storage=private_storage)
    # Clean-up.
    local_target_file.close()
    rm_directory(local_source_dir)
    return remote_target_filename
def zip_users(*args, **kw):
    """Dump all user files from private storage into a dated .tgz tarball.

    Copies the dumped user files to a local temp directory, adds license
    and readme files, tars everything, uploads the tarball back to private
    storage, and returns the remote tarball path.
    """
    date = datetime.datetime.utcnow().strftime('%Y-%m-%d')
    tarball_name = date
    # We need a temporary directory on the local filesystem that will
    # contain all files in order to call `tar`.
    local_source_dir = tempfile.mkdtemp()
    users_dirpath = os.path.join(settings.DUMPED_USERS_PATH, 'users')
    # In case users_dirpath is empty, add a dummy file to make the users
    # directory in the tar archive non-empty. It should not happen in prod,
    # but it's nice to have it to prevent the task from failing entirely.
    with private_storage.open(
            os.path.join(users_dirpath, '0', '.keep'), 'w') as fd:
        fd.write('.')
    # Now, copy content from private_storage to that temp directory. We
    # don't need to worry about creating the directories locally, the
    # storage class does that for us.
    for dirpath, dirnames, filenames in walk_storage(
            users_dirpath, storage=private_storage):
        for filename in filenames:
            src_path = os.path.join(dirpath, filename)
            dst_path = os.path.join(
                local_source_dir, 'users', os.path.basename(dirpath),
                filename)
            copy_stored_file(
                src_path, dst_path, src_storage=private_storage,
                dst_storage=local_storage)
    # Put some .txt files in place locally.
    context = Context({'date': date, 'url': settings.SITE_URL})
    extra_filenames = ['license.txt', 'readme.txt']
    for extra_filename in extra_filenames:
        template = loader.get_template('webapps/dump/users/' +
                                       extra_filename)
        dst = os.path.join(local_source_dir, extra_filename)
        with local_storage.open(dst, 'w') as fd:
            fd.write(template.render(context))
    # All our files are now present locally, let's generate a local
    # filename that will contain the final '.tar.gz' before it's copied
    # over to public storage.
    local_target_file = tempfile.NamedTemporaryFile(
        suffix='.tgz', prefix='dumped-users-')
    # tar ALL the things!
    cmd = (['tar', 'czf', local_target_file.name, '-C', local_source_dir] +
           ['users'] + extra_filenames)
    task_log.info(u'Creating user dump {0}'.format(local_target_file.name))
    # check_call instead of call: a failed tar previously went unnoticed,
    # silently uploading a truncated/empty tarball.
    subprocess.check_call(cmd)
    # Now copy the local tgz to the public storage.
    remote_target_filename = os.path.join(
        settings.DUMPED_USERS_PATH, 'tarballs', '%s.tgz' % tarball_name)
    copy_stored_file(local_target_file.name, remote_target_filename,
                     src_storage=local_storage,
                     dst_storage=private_storage)
    # Clean-up.
    local_target_file.close()
    rm_directory(local_source_dir)
    return remote_target_filename
% response.reason) pkcs7 = b64decode(json.loads(response.content)['zigbert.rsa']) try: jar.make_signed(pkcs7) except: log.error('App signing failed', exc_info=True) raise SigningError('App signing failed') storage = public_storage # By default signed packages are public. if reviewer: storage = private_storage elif local: storage = local_storage with local_storage.open(tempname) as temp_f, \ storage.open(dest, 'w') as dest_f: shutil.copyfileobj(temp_f, dest_f) def _get_endpoint(reviewer=False): """ Returns the proper API endpoint depending whether we are signing for reviewer or for public consumption. """ active = (settings.SIGNED_APPS_REVIEWER_SERVER_ACTIVE if reviewer else settings.SIGNED_APPS_SERVER_ACTIVE) server = (settings.SIGNED_APPS_REVIEWER_SERVER if reviewer else settings.SIGNED_APPS_SERVER) if active:
def _no_sign(src, dest_path):
    """Copy `src` to `dest_path` unsigned (local-development fallback)."""
    # On a local development instance there is no signing server; copying
    # the file around makes everything seem to work locally.
    log.info('Not signing the app, no signing server is active.')
    with local_storage.open(dest_path, 'w') as outfile:
        shutil.copyfileobj(src, outfile)
response.reason) pkcs7 = b64decode(json.loads(response.content)['zigbert.rsa']) try: jar.make_signed(pkcs7) except: log.error('App signing failed', exc_info=True) raise SigningError('App signing failed') storage = public_storage # By default signed packages are public. if reviewer: storage = private_storage elif local: storage = local_storage with local_storage.open(tempname) as temp_f, \ storage.open(dest, 'w') as dest_f: shutil.copyfileobj(temp_f, dest_f) def _get_endpoint(reviewer=False): """ Returns the proper API endpoint depending whether we are signing for reviewer or for public consumption. """ active = (settings.SIGNED_APPS_REVIEWER_SERVER_ACTIVE if reviewer else settings.SIGNED_APPS_SERVER_ACTIVE) server = (settings.SIGNED_APPS_REVIEWER_SERVER if reviewer else settings.SIGNED_APPS_SERVER) if active: