def refresh_bokchoy_db_cache_from_s3(fingerprint=None):
    """
    If the cache files for the current fingerprint exist in S3,
    then replace what you have on disk with those. If no copy
    exists on S3, then continue without error.
    """
    if not fingerprint:
        fingerprint = fingerprint_bokchoy_db_files(MIGRATION_OUTPUT_FILES, ALL_DB_FILES)

    bucket_name = CACHE_BUCKET_NAME
    path = CACHE_FOLDER

    if is_fingerprint_in_bucket(fingerprint, bucket_name):
        # Cache archives are stored in S3 keyed by fingerprint.
        zipfile_name = '{}.tar.gz'.format(fingerprint)
        get_file_from_s3(bucket_name, zipfile_name, path)

        zipfile_path = os.path.join(path, zipfile_name)
        print("Extracting db cache files.")
        extract_files_from_zip(BOKCHOY_DB_FILES, zipfile_path, path)
        # Clean up the downloaded archive once its contents are extracted.
        os.remove(zipfile_path)
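The S3 helpers used above (is_fingerprint_in_bucket, get_file_from_s3) are defined elsewhere in the repo. A minimal sketch of what they might look like, assuming boto3 and the '{fingerprint}.tar.gz' key naming from the function above:

import os

import boto3
import botocore.exceptions


def is_fingerprint_in_bucket(fingerprint, bucket_name):
    """Return True if an archive named '<fingerprint>.tar.gz' exists in the bucket."""
    key = '{}.tar.gz'.format(fingerprint)
    try:
        boto3.client('s3').head_object(Bucket=bucket_name, Key=key)
    except botocore.exceptions.ClientError:
        # head_object raises ClientError (e.g. a 404) when the key is absent.
        return False
    return True


def get_file_from_s3(bucket_name, file_name, path):
    """Download file_name from the bucket into the local directory `path`."""
    boto3.client('s3').download_file(bucket_name, file_name, os.path.join(path, file_name))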
def test_extract_files_from_zip(self, _mock_verify):  # _mock_verify: patched dependency, unused here
    test_dir = mkdtemp()
    output_dir = mkdtemp()
    self.addCleanup(shutil.rmtree, test_dir)
    self.addCleanup(shutil.rmtree, output_dir)

    # Create a small text file and pack it into a gzipped tarball.
    tmp_file_name = os.path.join(test_dir, 'test.txt')
    with open(tmp_file_name, 'w') as tmp_file:
        tmp_file.write('Test file content')
    tmp_tarfile = os.path.join(test_dir, 'test.tar.gz')
    with tarfile.open(name=tmp_tarfile, mode='w:gz') as tar_file:
        tar_file.add(tmp_file_name, arcname='test.txt')

    # Extract the named file and verify its contents survived the round trip.
    extract_files_from_zip(['test.txt'], tmp_tarfile, output_dir)
    extracted_file = os.path.join(output_dir, 'test.txt')
    assert os.path.isfile(extracted_file)
    with open(extracted_file, 'r') as test_file:
        data = test_file.read()
    assert data == 'Test file content'
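extract_files_from_zip is the function under test; despite the name, the test feeds it a gzipped tarball, so a tarfile-based implementation is implied. A minimal sketch consistent with what the test exercises (extract only the named members into the output directory):

import tarfile


def extract_files_from_zip(file_names, archive_path, output_dir):
    """Extract only the named members of a .tar.gz archive into output_dir."""
    with tarfile.open(archive_path, mode='r:gz') as tar_file:
        for name in file_names:
            # tarfile.extract places the member under output_dir by its archive name.
            tar_file.extract(name, path=output_dir)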