def repack_fileupload(upload_pk):
    """Repackage the zip of the FileUpload identified by ``upload_pk``.

    The upload's xpi is extracted and re-zipped in a normalized form, its
    sha256 hash is recomputed, and the repackaged archive is moved back to
    ``upload.path``. Non-xpi uploads are left untouched.
    """
    log.info('Starting task to repackage FileUpload %s', upload_pk)
    upload = FileUpload.objects.get(pk=upload_pk)
    # When a FileUpload is created and a file added to it, if it's a xpi/zip,
    # it should be move to upload.path, and it should have a .xpi extension,
    # so we only need to care about that extension here.
    # We don't trust upload.name: it's the original filename as used by the
    # developer, so it could be something else.
    if not upload.path.endswith('.xpi'):
        log.info('Not repackaging upload %s, it is not a xpi file.',
                 upload_pk)
        return
    try:
        tempdir = extract_zip(upload.path)
    except Exception:
        # Something bad happened, maybe we couldn't parse the zip file.
        # This task should have a on_error attached when called by
        # Validator(), so we can just raise and the developer will get a
        # generic error message.
        log.exception('Could not extract upload %s for repack.', upload_pk)
        raise
    try:
        log.info('Zip from upload %s extracted, repackaging', upload_pk)
        # We only need the temp file's *name* (delete=False keeps the file
        # around and make_archive re-creates it); close the handle right
        # away so we don't leak a file descriptor.
        file_ = tempfile.NamedTemporaryFile(suffix='.zip', delete=False)
        file_.close()
        shutil.make_archive(os.path.splitext(file_.name)[0], 'zip', tempdir)
        with open(file_.name, 'rb') as f:
            upload.hash = 'sha256:%s' % get_sha256(f)
        log.info('Zip from upload %s repackaged, moving file back', upload_pk)
        move_stored_file(file_.name, upload.path)
        upload.save()
    finally:
        # extract_zip() leaves the extracted tree behind; always clean it up
        # so repeated repacks don't fill the disk.
        shutil.rmtree(tempdir, ignore_errors=True)
def test_zip_folder_content():
    """Round-tripping an xpi through extract_zip/zip_folder_content must
    preserve the archive's file list."""
    extension_file = 'src/olympia/files/fixtures/files/extension.xpi'
    # Initialize before the try block: if extract_zip() or
    # get_temp_filename() raises, the finally clause would otherwise hit a
    # NameError and mask the real failure.
    temp_filename, temp_folder = None, None
    try:
        temp_folder = utils.extract_zip(extension_file)
        # os.listdir() order is OS-dependent; sort before comparing.
        assert sorted(os.listdir(temp_folder)) == [
            'chrome', 'chrome.manifest', 'install.rdf']
        temp_filename = amo.tests.get_temp_filename()
        utils.zip_folder_content(temp_folder, temp_filename)
        # Make sure the zipped files contain the same files, regardless of
        # the order entries were written in.
        with zipfile.ZipFile(temp_filename, mode='r') as new:
            with zipfile.ZipFile(extension_file, mode='r') as orig:
                assert sorted(new.namelist()) == sorted(orig.namelist())
    finally:
        if temp_folder is not None and os.path.exists(temp_folder):
            amo.utils.rm_local_tmp_dir(temp_folder)
        if temp_filename is not None and os.path.exists(temp_filename):
            os.unlink(temp_filename)
def test_zip_folder_content():
    """Extract an xpi and re-zip it; the member list must be unchanged."""
    fixture_path = "src/olympia/files/fixtures/files/extension.xpi"
    temp_filename = None
    temp_folder = None
    try:
        temp_folder = utils.extract_zip(fixture_path)
        expected_entries = ["chrome", "chrome.manifest", "install.rdf"]
        assert sorted(os.listdir(temp_folder)) == expected_entries
        temp_filename = amo.tests.get_temp_filename()
        utils.zip_folder_content(temp_folder, temp_filename)
        # The re-zipped archive must list exactly the original entries.
        with zipfile.ZipFile(temp_filename, mode="r") as rezipped, \
                zipfile.ZipFile(fixture_path, mode="r") as original:
            assert sorted(rezipped.namelist()) == sorted(original.namelist())
    finally:
        # Only clean up what was actually created before a failure.
        if temp_folder is not None and os.path.exists(temp_folder):
            amo.utils.rm_local_tmp_dir(temp_folder)
        if temp_filename is not None and os.path.exists(temp_filename):
            os.unlink(temp_filename)
def test_zip_folder_content():
    """zip_folder_content() must produce an archive whose entries match
    the xpi it was extracted from."""
    source_xpi = 'src/olympia/files/fixtures/files/extension.xpi'
    extracted_dir = None
    rezipped_path = None
    try:
        extracted_dir = utils.extract_zip(source_xpi)
        assert sorted(os.listdir(extracted_dir)) == [
            'chrome',
            'chrome.manifest',
            'install.rdf',
        ]
        rezipped_path = amo.tests.get_temp_filename()
        utils.zip_folder_content(extracted_dir, rezipped_path)
        # Compare the (order-insensitive) member lists of both archives.
        with zipfile.ZipFile(rezipped_path, mode='r') as new_zip:
            with zipfile.ZipFile(source_xpi, mode='r') as old_zip:
                assert sorted(new_zip.namelist()) == sorted(old_zip.namelist())
    finally:
        # Remove whatever temp artifacts were created, and nothing else.
        if extracted_dir is not None and os.path.exists(extracted_dir):
            amo.utils.rm_local_tmp_dir(extracted_dir)
        if rezipped_path is not None and os.path.exists(rezipped_path):
            os.unlink(rezipped_path)
def extract_strict_compatibility_value_for_addon(addon):
    """Read the strictCompatibility flag out of an add-on's install.rdf.

    Returns True/False when the flag could be extracted, or None when
    anything went wrong (missing file, unparsable archive, ...).
    """
    result = None  # We don't know yet.
    unpacked_dir = None
    try:
        # We take a shortcut here and only look at the first file we find...
        # Note that we can't use the parse_addon() wrapper because it no
        # longer exposes the real value of `strictCompatibility`...
        first_file_path = addon.current_version.all_files[0].file_path
        with storage.open(first_file_path) as file_:
            unpacked_dir = extract_zip(file_)
            rdf = RDFExtractor(unpacked_dir)
            result = rdf.find('strictCompatibility') == 'true'
    except Exception as exp:
        # A number of things can go wrong: missing file, path somehow not
        # existing, etc. In any case, that means the add-on is in a weird
        # state and should be ignored (this is a one off task).
        log.exception(u'bump_appver_for_legacy_addons: ignoring addon %d, '
                      u'received %s when extracting.', addon.pk, unicode(exp))
    finally:
        if unpacked_dir:
            rm_local_tmp_dir(unpacked_dir)
    return result
def repack_fileupload(results, upload_pk):
    """Repackage the zip of the FileUpload identified by ``upload_pk``.

    The upload is extracted to a local temp dir, optionally has its
    manifest.json normalized (behind the `enable-manifest-normalization`
    waffle switch, and only for non-Mozilla-signed extensions), then is
    re-zipped, re-hashed and moved back to ``upload.path``.

    Returns ``results`` unchanged so the task can sit in a validation chain.
    """
    log.info('Starting task to repackage FileUpload %s', upload_pk)
    upload = FileUpload.objects.get(pk=upload_pk)
    # When a FileUpload is created and a file added to it, if it's a xpi/zip,
    # it should be move to upload.path, and it should have a .zip extension,
    # so we only need to care about that extension here.
    # We don't trust upload.name: it's the original filename as used by the
    # developer, so it could be something else.
    if upload.path.endswith('.zip'):
        timer = StopWatch('files.tasks.repack_fileupload.')
        timer.start()
        # tempdir must *not* be on TMP_PATH, we want local fs instead. It
        # will be deleted automatically once we exit the context manager.
        with tempfile.TemporaryDirectory(
                prefix='repack_fileupload_extract') as tempdir:
            try:
                extract_zip(upload.path, tempdir=tempdir)
                if waffle.switch_is_active('enable-manifest-normalization'):
                    manifest = Path(tempdir) / 'manifest.json'
                    if manifest.exists():
                        try:
                            xpi_data = parse_xpi(upload.path, minimal=True)
                            if not xpi_data.get(
                                    'is_mozilla_signed_extension', False):
                                json_data = ManifestJSONExtractor(
                                    manifest.read_bytes()).data
                                manifest.write_text(
                                    json.dumps(json_data, indent=2))
                        except Exception:
                            # If we cannot normalize the manifest file, we
                            # skip this step and let the linter catch the
                            # exact cause in order to return a more
                            # appropriate error than "unexpected error",
                            # which would happen if this task was handling
                            # the error itself.
                            pass
            except Exception as exc:
                # Something bad happened, maybe we couldn't parse the zip
                # file. @validation_task should ensure the exception is
                # caught and transformed in a generic error message for the
                # developer, so we just log it and re-raise.
                log.exception('Could not extract upload %s for repack.',
                              upload_pk, exc_info=exc)
                raise
            timer.log_interval('1.extracted')
            log.info('Zip from upload %s extracted, repackaging', upload_pk)
            # We'll move the file to its final location below with
            # move_stored_file(), so don't let tempfile delete it. We only
            # need its *name* (make_archive re-creates the file), so close
            # the handle immediately instead of leaking a file descriptor.
            file_ = tempfile.NamedTemporaryFile(
                dir=settings.TMP_PATH, suffix='.zip', delete=False)
            file_.close()
            shutil.make_archive(
                os.path.splitext(file_.name)[0], 'zip', tempdir)
            with open(file_.name, 'rb') as f:
                upload.hash = 'sha256:%s' % get_sha256(f)
        timer.log_interval('2.repackaged')
        log.info('Zip from upload %s repackaged, moving file back',
                 upload_pk)
        storage.move_stored_file(file_.name, upload.path)
        timer.log_interval('3.moved')
        upload.save()
        timer.log_interval('4.end')
    else:
        log.info('Not repackaging upload %s, it is not a zip file.',
                 upload_pk)
    return results