def test_add_buffer(self):
  """add_buffer accepts byte strings only and registers the archive name."""
  package = zip_package.ZipPackage(self.temp_dir)
  package.add_buffer('buf1', b'')
  self.assertEqual(['buf1'], list(package.files))
  # Text (unicode) payloads must be rejected outright.
  with self.assertRaises(AssertionError):
    package.add_buffer('buf2', u'unicode')
def test_running_from_zip(self):
  """Introspection helpers report correct values from inside a zip."""
  # The test itself must run from a normal checkout, not from a zip.
  this_module = sys.modules[__name__]
  self.assertFalse(zip_package.is_zipped_module(this_module))
  self.assertIsNone(zip_package.get_module_zip_archive(this_module))
  main_script = os.path.abspath(zip_package.get_main_script_path())
  self.assertTrue(main_script.startswith(ROOT_DIR))
  # Build an executable zip that calls the same functions from within.
  package = zip_package.ZipPackage(self.temp_dir)
  package.add_directory(os.path.join(ROOT_DIR, 'utils'), 'utils')
  package.add_buffer('__main__.py', '\n'.join([
      'import sys',
      '',
      'from utils import zip_package',
      '',
      'print zip_package.is_zipped_module(sys.modules[__name__])',
      'print zip_package.get_module_zip_archive(sys.modules[__name__])',
      'print zip_package.get_main_script_path()',
  ]))
  archive = os.path.join(self.temp_dir, 'out.zip')
  package.zip_into_file(archive)
  # Run the zip and validate what it printed.
  proc = subprocess.Popen(
      [sys.executable, archive],
      stdout=subprocess.PIPE,
      stderr=subprocess.PIPE)
  out, err = proc.communicate()
  self.assertEqual(err, '')
  self.assertEqual(out.strip().splitlines(), ['True', archive, archive])
def test_archive_path_is_respected(self):
  """An explicit |archive_path| overrides default placement in the zip."""
  self.stage_files(['a', 'b.py', 'dir/c'])
  package = zip_package.ZipPackage(self.temp_dir)
  package.add_file(os.path.join(self.temp_dir, 'a'), 'd1/a')
  package.add_python_file(os.path.join(self.temp_dir, 'b.py'), 'd2/b.py')
  package.add_directory(os.path.join(self.temp_dir, 'dir'), 'd3')
  self.assertEqual(set(package.files), set(['d1/a', 'd2/b.py', 'd3/c']))
def __init__(
    self, isolated_hash, test_name, shards, test_filter, slave_os,
    working_dir, isolate_server, verbose, profile, priority, algo):
  """Populates a manifest object.

  Args:
    isolated_hash - The manifest's sha-1 that the slave is going to fetch.
    test_name - The name to give the test request.
    shards - The number of swarm shards to request.
    test_filter - The gtest filter to apply when running the test.
    slave_os - OS to run on.
    working_dir - Relative working directory to start the script.
    isolate_server - isolate server url.
    verbose - if True, have the slave print more details.
    profile - if True, have the slave print more timing data.
    priority - int between 0 and 1000, lower the higher priority.
    algo - hashing algorithm used.
  """
  # Isolate server coordinates and the data to fetch.
  self.isolated_hash = isolated_hash
  self.isolate_server = isolate_server
  self.storage = isolateserver.get_storage(isolate_server, 'default')
  # Zip bundle of files shipped along with the task request.
  self.bundle = zip_package.ZipPackage(ROOT_DIR)
  # Task request parameters.
  self._test_name = test_name
  self._shards = shards
  self._test_filter = test_filter
  self._target_platform = slave_os
  self._working_dir = working_dir
  self.verbose = bool(verbose)
  self.profile = bool(profile)
  self.priority = priority
  self._algo = algo
  # Populated lazily later.
  self._isolate_item = None
  self._tasks = []
def test_running_from_zip(self):
  """Introspection helpers report correct values from inside a zip."""
  # The test itself must run from a normal checkout, not from a zip.
  this_module = sys.modules[__name__]
  self.assertFalse(zip_package.is_zipped_module(this_module))
  self.assertIsNone(zip_package.get_module_zip_archive(this_module))
  main_script = os.path.abspath(zip_package.get_main_script_path())
  self.assertTrue(main_script.startswith(test_env.CLIENT_DIR))
  # Build an executable zip that calls the same functions from within.
  package = zip_package.ZipPackage(self.temp_dir)
  package.add_directory(os.path.join(test_env.CLIENT_DIR, 'utils'), 'utils')
  package.add_buffer('__main__.py', b'\n'.join([
      b'import sys',
      b'',
      b'from utils import zip_package',
      b'',
      b'print(zip_package.is_zipped_module(sys.modules[__name__]))',
      b'print(zip_package.get_module_zip_archive(sys.modules[__name__]))',
      b'print(zip_package.get_main_script_path())',
  ]))
  archive = os.path.join(self.temp_dir, 'out.zip')
  package.zip_into_file(archive)
  # Run the zip and validate what it printed.
  printed = check_output([sys.executable, archive]).strip().splitlines()
  self.assertEqual(['True', archive, archive], printed)
def test_adding_missing_files(self):
  """Every add_* method raises ZipPackageError on a nonexistent path."""
  package = zip_package.ZipPackage(self.temp_dir)
  cases = [
      (package.add_file, 'im_not_here'),
      (package.add_python_file, 'im_not_here.py'),
      (package.add_directory, 'im_not_here_dir'),
  ]
  for add, name in cases:
    with self.assertRaises(zip_package.ZipPackageError):
      add(os.path.join(self.temp_dir, name))
def test_adding_dir_as_file(self):
  """A directory is rejected by add_file but accepted by add_directory."""
  self.stage_files(['dir/keep'])
  package = zip_package.ZipPackage(self.temp_dir)
  directory = os.path.join(self.temp_dir, 'dir')
  # Wrong API: adding a directory as a file fails.
  with self.assertRaises(zip_package.ZipPackageError):
    package.add_file(directory)
  # Right API: adding it as a directory works.
  package.add_directory(directory)
def test_adding_py_instead_of_pyc(self):
  """add_python_file on compiled bytecode picks up the .py source instead."""
  self.stage_files(['file.py', 'file.pyo', 'file.pyc'])
  for compiled in ('file.pyc', 'file.pyo'):
    package = zip_package.ZipPackage(self.temp_dir)
    package.add_python_file(os.path.join(self.temp_dir, compiled))
    self.assertIn('file.py', package.files)
def test_repeatable_content(self):
  """Zipping the same tree twice yields byte-identical archives."""
  snapshots = []
  for _ in (0, 1):
    # Rebuild the temp dir content from scratch each round.
    assert not os.listdir(self.temp_dir)
    self.stage_files({'a': b'123', 'b': b'456', 'c': b'789'})
    package = zip_package.ZipPackage(self.temp_dir)
    package.add_directory(self.temp_dir)
    snapshots.append(package.zip_into_buffer())
    # Wipe everything for the next round.
    for entry in os.listdir(self.temp_dir):
      os.remove(os.path.join(self.temp_dir, entry))
  # Both rounds must produce exactly the same bytes.
  self.assertEqual(snapshots[0], snapshots[1])
def test_zipping(self):
  """zip_into_buffer and zip_into_file both produce readable archives."""
  data = {'a': b'123', 'b/c': b'456'}
  self.stage_files(data)
  package = zip_package.ZipPackage(self.temp_dir)
  package.add_directory(self.temp_dir)
  # In-memory archive round-trips to the same content.
  for compress in (True, False):
    blob = package.zip_into_buffer(compress=compress)
    self.assertEqual(data, self.read_zip(io.BytesIO(blob)))
  # On-disk archive round-trips too.
  for compress in (True, False):
    target = os.path.join(self.temp_dir, 'pkg.zip')
    package.zip_into_file(target, compress=compress)
    with open(target, 'rb') as f:
      self.assertEqual(data, self.read_zip(f))
def test_extract_resource(self):
  """extract_resource drops the file into the system temp directory."""
  package = zip_package.ZipPackage(self.temp_dir)
  package.add_directory(os.path.join(ROOT_DIR, 'utils'), 'utils')
  package.add_buffer('cert.pem', 'Certificate\n')
  package.add_buffer('__main__.py', '\n'.join([
      'import sys',
      'from utils import zip_package',
      'print zip_package.extract_resource(sys.modules[__name__], \'cert.pem\')',
  ]))
  archive = os.path.join(self.temp_dir, 'out.zip')
  package.zip_into_file(archive)
  extracted = check_output([sys.executable, archive]).strip()
  # The resource lands in the temp dir under a recognizable name.
  self.assertEqual(tempfile.gettempdir(), os.path.dirname(extracted))
  name = os.path.basename(extracted)
  self.assertTrue(name.startswith('.zip_pkg-'), extracted)
  self.assertTrue(name.endswith('-cert.pem'), extracted)
def test_extract_resource_temp_dir(self):
  """extract_resource with an explicit dir names the file by content hash."""
  package = zip_package.ZipPackage(self.temp_dir)
  package.add_directory(os.path.join(ROOT_DIR, 'utils'), 'utils')
  package.add_buffer('cert.pem', 'Certificate\n')
  package.add_buffer('__main__.py', '\n'.join([
      'import sys',
      'from utils import zip_package',
      'print zip_package.extract_resource(',
      '    sys.modules[__name__], \'cert.pem\', %r)' % self.temp_dir,
  ]))
  archive = os.path.join(self.temp_dir, 'out.zip')
  package.zip_into_file(archive)
  extracted = check_output([sys.executable, archive]).strip()
  expected = os.path.join(
      self.temp_dir, '321690737f78d081937f88c3fd0e625dd48ae07d-cert.pem')
  self.assertEqual(expected, extracted)
def get_as_zip_package(executable=True):
  """Returns a ZipPackage with this module and all its dependencies.

  If |executable| is True, stores run_isolated.py as __main__.py so that the
  resulting zip package is directly executable by python.
  """
  # Building a zip package while already running from another zip package is
  # unsupported and probably unneeded.
  assert not zip_package.is_zipped_module(sys.modules[__name__])
  assert THIS_FILE_PATH
  assert BASE_DIR
  package = zip_package.ZipPackage(root=BASE_DIR)
  package.add_python_file(
      THIS_FILE_PATH, '__main__.py' if executable else None)
  for name in ('isolateserver.py', 'auth.py'):
    package.add_python_file(os.path.join(BASE_DIR, name))
  for name in ('third_party', 'utils'):
    package.add_directory(os.path.join(BASE_DIR, name))
  return package
def test_zip_bundle_files(self):
  """setup_run_isolated adds exactly the bootstrap files to the bundle."""
  manifest = swarming.Manifest(
      isolate_server='http://localhost:8081',
      namespace='default-gzip',
      isolated_hash=FILE_HASH,
      task_name=TEST_NAME,
      extra_args=None,
      env={},
      dimensions={'os': 'Linux'},
      deadline=60*60,
      verbose=False,
      profile=False,
      priority=101)
  bundle = zip_package.ZipPackage(swarming.ROOT_DIR)
  swarming.setup_run_isolated(manifest, bundle)
  expected = set(['run_isolated.zip', 'swarm_cleanup.py'])
  self.assertEqual(expected, set(bundle.files))
def __init__(self, isolate_server, namespace, isolated_hash, task_name,
             extra_args, shards, env, dimensions, working_dir, deadline,
             verbose, profile, priority):
  """Populates a manifest object.

  Args:
    isolate_server - isolate server url.
    namespace - isolate server namespace to use.
    isolated_hash - the manifest's sha-1 that the slave is going to fetch.
    task_name - the name to give the task request.
    extra_args - additional arguments to pass to isolated command.
    shards - the number of swarming shards to request.
    env - environment variables to set.
    dimensions - dimensions to filter the task on.
    working_dir - relative working directory to start the script.
    deadline - maximum pending time before this task expires.
    verbose - if True, have the slave print more details.
    profile - if True, have the slave print more timing data.
    priority - int between 0 and 1000, lower the higher priority.
  """
  # Isolate server coordinates.
  self.isolate_server = isolate_server
  self.namespace = namespace
  # swarm_bot doesn't understand compressed data yet, so the data it
  # downloads lives in 'default', independent of what run_isolated.py is
  # going to fetch.
  self.storage = isolateserver.get_storage(isolate_server, 'default')
  self.isolated_hash = isolated_hash
  self.extra_args = tuple(extra_args or [])
  # Zip bundle of files shipped along with the task request.
  self.bundle = zip_package.ZipPackage(ROOT_DIR)
  # Task request parameters.
  self._task_name = task_name
  self._shards = shards
  self._env = env.copy()
  self._dimensions = dimensions.copy()
  self._working_dir = working_dir
  self._deadline = deadline
  self.verbose = bool(verbose)
  self.profile = bool(profile)
  self.priority = priority
  # Populated lazily later.
  self._isolate_item = None
  self._tasks = []
def test_extract_resource_temp_dir(self):
  """extract_resource with an explicit dir names the file by content hash."""
  package = zip_package.ZipPackage(self.temp_dir)
  package.add_directory(os.path.join(test_env.CLIENT_DIR, 'utils'), 'utils')
  package.add_buffer('cert.pem', b'Certificate\n')
  package.add_buffer('__main__.py', b'\n'.join([
      b'import sys',
      b'from utils import zip_package',
      b'print(zip_package.extract_resource(',
      b'    sys.modules[__name__], \'cert.pem\', %r))' % self.temp_dir,
  ]))
  archive = os.path.join(self.temp_dir, 'out.zip')
  package.zip_into_file(archive)
  extracted = check_output([sys.executable, archive]).strip()
  expected = os.path.join(
      self.temp_dir,
      'e47a41780d9cb4a1234e4915b14443bdaa8fae9f821b00f0f2fed719661572f6-cert.'
      'pem')
  self.assertEqual(expected, extracted)
def test_add_directory(self):
  """add_directory picks up sources and skips VCS metadata and bytecode."""
  wanted = [
      'script.py',
      'a/1.txt',
      'a/2.txt',
      'a/b/3.txt',
      'a/script.py',
  ]
  unwanted = [
      'script.pyc',
      'a/script.pyo',
      '.git/stuff',
      '.svn/stuff',
      'a/.svn/stuff',
      'a/b/.svn/stuff',
  ]
  # Stage everything and verify only |wanted| files end up in the package.
  self.stage_files(wanted + unwanted)
  package = zip_package.ZipPackage(self.temp_dir)
  package.add_directory(self.temp_dir)
  self.assertEqual(set(package.files), set(wanted))
def test_added_files_are_under_root(self):
  """Paths outside the package root require an explicit |archive_path|."""
  self.stage_files(['a.txt', 'p.py', 'pkg/1.txt', 'some_dir/2.txt'])
  package = zip_package.ZipPackage(os.path.join(self.temp_dir, 'pkg'))
  # With |archive_path| out-of-root additions are accepted.
  package.add_file(os.path.join(self.temp_dir, 'a.txt'), '_a.txt')
  package.add_python_file(os.path.join(self.temp_dir, 'p.py'), '_p.py')
  package.add_directory(os.path.join(self.temp_dir, 'pkg'), '_pkg')
  # Without it, they are rejected.
  with self.assertRaises(zip_package.ZipPackageError):
    package.add_file(os.path.join(self.temp_dir, 'a.txt'))
  with self.assertRaises(zip_package.ZipPackageError):
    package.add_python_file(os.path.join(self.temp_dir, 'p.py'))
  with self.assertRaises(zip_package.ZipPackageError):
    package.add_directory(os.path.join(self.temp_dir, 'a.txt'))
def test_require_absolute_file_paths(self):
  """All add_* methods assert when handed a relative path."""
  self.stage_files(['a.txt', 'b.py', 'c/c.txt'])
  # Item to add -> unbound method used to add it.
  cases = [
      ('a.txt', zip_package.ZipPackage.add_file),
      ('b.py', zip_package.ZipPackage.add_python_file),
      ('c', zip_package.ZipPackage.add_directory),
  ]
  for rel_path, add in cases:
    package = zip_package.ZipPackage(self.temp_dir)
    # The absolute form is accepted.
    add(package, os.path.join(self.temp_dir, rel_path))
    # The relative form trips an assertion.
    with self.assertRaises(AssertionError):
      add(package, rel_path)
def trigger_task_shards(
    swarming, isolate_server, namespace, isolated_hash, task_name, extra_args,
    shards, dimensions, env, deadline, verbose, profile, priority):
  """Triggers multiple subtasks of a sharded task.

  Args:
    swarming - swarming server url.
    isolate_server - isolate server url.
    namespace - isolate server namespace to use.
    isolated_hash - isolated hash the bots will fetch.
    task_name - base name; each shard gets its own derived subtask name.
    extra_args - additional arguments to pass to the isolated command.
    shards - number of shards to trigger.
    dimensions - dimensions to filter the tasks on.
    env - environment variables to set (augmented per shard).
    deadline - maximum pending time before the tasks expire.
    verbose, profile - slave verbosity flags.
    priority - int between 0 and 1000, lower is higher priority.

  Returns:
    dict(task_name: task_id) of the triggered subtasks, or None in case of
    failure.
  """
  # Collects all files that are necessary to bootstrap a task execution
  # on the bot. Usually it includes self contained run_isolated.zip and
  # a bunch of small other scripts. All heavy files are pulled
  # by run_isolated.zip. Updated in 'setup_run_isolated'.
  bundle = zip_package.ZipPackage(ROOT_DIR)

  # Make a separate Manifest for each shard, put shard index and number of
  # shards into env and subtask name.
  manifests = []
  for index in xrange(shards):
    manifest = Manifest(
        isolate_server=isolate_server,
        namespace=namespace,
        isolated_hash=isolated_hash,
        task_name=get_shard_task_name(task_name, shards, index),
        extra_args=extra_args,
        dimensions=dimensions,
        env=setup_googletest(env, shards, index),
        deadline=deadline,
        verbose=verbose,
        profile=profile,
        priority=priority)
    setup_run_isolated(manifest, bundle)
    manifests.append(manifest)

  # Upload zip bundle file to get its URL.
  bundle_url = upload_zip_bundle(isolate_server, bundle)
  if not bundle_url:
    # Bug fix: this path used to 'return None, None' — a truthy 2-tuple —
    # while the docstring and the other failure path promise a bare None.
    return None

  # Attach that file to all manifests.
  for manifest in manifests:
    manifest.add_bundled_file('swarm_data.zip', bundle_url)

  # Trigger all the subtasks.
  tasks = {}
  priority_warning = False
  for manifest in manifests:
    # 'effective_priority' (not 'priority') avoids clobbering the parameter;
    # the server may reset the requested priority to a different value.
    task_id, effective_priority = trigger_by_manifest(swarming, manifest)
    if not task_id:
      break
    if not priority_warning and effective_priority != manifest.priority:
      priority_warning = True
      print >> sys.stderr, 'Priority was reset to %s' % effective_priority
    tasks[manifest.task_name] = task_id

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(manifests):
    if tasks:
      print >> sys.stderr, 'Not all shards were triggered'
      for task_id in tasks.itervalues():
        abort_task(swarming, task_id)
    return None

  return tasks
def test_require_absolute_root(self):
  """The package root must be given as an absolute path."""
  # Absolute root is accepted.
  zip_package.ZipPackage(self.temp_dir)
  # Relative root trips an assertion.
  with self.assertRaises(AssertionError):
    zip_package.ZipPackage('.')
def test_adding_non_python_as_python(self):
  """add_python_file refuses files that are not python sources."""
  self.stage_files(['file.sh'])
  package = zip_package.ZipPackage(self.temp_dir)
  with self.assertRaises(zip_package.ZipPackageError):
    package.add_python_file(os.path.join(self.temp_dir, 'file.sh'))
def test_adding_same_file_twice(self):
  """A given path can only be added to the package once."""
  self.stage_files(['file'])
  package = zip_package.ZipPackage(self.temp_dir)
  path = os.path.join(self.temp_dir, 'file')
  package.add_file(path)
  # A second addition of the exact same path is an error.
  with self.assertRaises(zip_package.ZipPackageError):
    package.add_file(path)