def test_download_isolated_simple(self):
  """Tests downloading an isolated tree; writes files to disk for real.

  Builds the .isolated document describing two files plus a symlink; the
  rest of the test (mocked retrieve requests and assertions) follows.
  """
  server_ref = isolate_storage.ServerRef('http://example.com', 'default-gzip')
  files = {
      os.path.join('a', 'foo'): 'Content',
      'b': 'More content',
  }
  isolated = {
      'command': ['Absurb', 'command'],
      'relative_cwd': 'a',
      'files': {
          os.path.join('a', 'foo'): {
              'h': isolateserver_fake.hash_content('Content'),
              's': len('Content'),
              # 0o700 instead of legacy 0700: same value, but valid in both
              # Python 2.6+ and Python 3, and consistent with the other
              # tests in this file that already use the 0o form.
              'm': 0o700,
          },
          'b': {
              'h': isolateserver_fake.hash_content('More content'),
              's': len('More content'),
              'm': 0o600,
          },
          'c': {
              # 'l' marks a symlink entry pointing at a/foo.
              'l': 'a/foo',
          },
      },
      'read_only': 1,
      'version': isolated_format.ISOLATED_FILE_VERSION,
  }
def test_fetch_offset_bad_header(self):
  """Fetching at an offset must raise IOError whenever the server's
  Content-Range response header is absent or malformed."""
  server_ref = isolate_storage.ServerRef('http://example.com', 'default')
  payload = ''.join(str(i) for i in range(1000))
  digest = isolateserver_fake.hash_content(payload)
  offset = 200
  size = len(payload)
  # Every way the Content-Range header can be wrong.
  bad_headers = [
      None,  # Missing header.
      '',
      'not bytes %d-%d/%d' % (offset, size - 1, size),  # Bad format.
      'bytes %d-%d' % (offset, size - 1),
      'bytes %d-%d/%d' % (offset - 1, size - 1, size),  # Bad offset.
      'bytes %d-%d/%d' % (offset, offset + 10, size),  # Incomplete chunk.
  ]
  for header in bad_headers:
    self.expected_requests([
        self.mock_fetch_request(server_ref, digest, offset=offset),
        self.mock_gs_request(
            server_ref, digest, payload, offset=offset,
            request_headers={'Range': 'bytes=%d-' % offset},
            response_headers={'Content-Range': header}),
    ])
    storage = isolate_storage.IsolateServer(server_ref)
    with self.assertRaises(IOError):
      ''.join(storage.fetch(digest, 0, offset))
def test_main_naked(self):
  """run_isolated.main() returns 1 when launching the command fails
  (Popen is mocked to raise OSError)."""
  self.mock_popen_with_oserr()
  self.mock(on_error, 'report', lambda _: None)
  self.mock(tools, 'disable_buffering', lambda: None)
  # The most naked .isolated file that can exist.
  isolated = json_dumps({'command': ['invalid', 'command']})
  isolated_hash = isolateserver_fake.hash_content(isolated)
  self.mock(
      isolateserver, 'get_storage',
      lambda server_ref: StorageFake({isolated_hash: isolated}, server_ref))
  ret = run_isolated.main([
      '--no-log',
      '--isolated', isolated_hash,
      '--cache', os.path.join(self.tempdir, 'isolated_cache'),
      '--isolate-server', 'https://localhost',
      '--named-cache-root', os.path.join(self.tempdir, 'named_cache'),
      '--root-dir', self.tempdir,
  ])
  self.assertEqual(1, ret)
  self.assertEqual(1, len(self.popen_calls))
  expected_call = (
      [self.ir_dir(u'invalid'), u'command'],
      {'cwd': self.ir_dir(), 'detached': True, 'close_fds': True})
  self.assertEqual([expected_call], self.popen_calls)
def test_main_args(self):
  """Arguments after '--' are appended to the isolated command line."""
  self.mock(tools, 'disable_buffering', lambda: None)
  isolated = json_dumps({'command': ['foo.exe', 'cmd w/ space']})
  isolated_hash = isolateserver_fake.hash_content(isolated)
  self.mock(
      isolateserver, 'get_storage',
      lambda server_ref: StorageFake({isolated_hash: isolated}, server_ref))
  ret = run_isolated.main([
      '--use-symlinks',
      '--no-log',
      '--isolated', isolated_hash,
      '--cache', os.path.join(self.tempdir, 'isolated_cache'),
      '--isolate-server', 'https://localhost',
      '--named-cache-root', os.path.join(self.tempdir, 'named_cache'),
      '--root-dir', self.tempdir,
      '--',
      '--extraargs',
      'bar',
  ])
  self.assertEqual(0, ret)
  expected_call = (
      [self.ir_dir(u'foo.exe'), u'cmd w/ space', '--extraargs', 'bar'],
      {'cwd': self.ir_dir(), 'detached': True, 'close_fds': True})
  self.assertEqual([expected_call], self.popen_calls)
def _run_test(self, isolated, command, extra_args): # Starts a full isolate server mock and have run_tha_test() uploads results # back after the task completed. server = isolateserver_fake.FakeIsolateServer() try: # Output the following structure: # # foo1 # foodir --> foo2_sl (symlink to "foo2_content" file) # bardir --> bar1 # # Create the symlinks only on Linux. script = ( 'import os\n' 'import sys\n' 'open(sys.argv[1], "w").write("foo1")\n' 'bar1_path = os.path.join(sys.argv[3], "bar1")\n' 'open(bar1_path, "w").write("bar1")\n' 'if sys.platform.startswith("linux"):\n' ' foo_realpath = os.path.abspath("foo2_content")\n' ' open(foo_realpath, "w").write("foo2")\n' ' os.symlink(foo_realpath, sys.argv[2])\n' 'else:\n' ' open(sys.argv[2], "w").write("foo2")\n') script_hash = isolateserver_fake.hash_content(script) isolated['files']['cmd.py'] = { 'h': script_hash, 'm': 0700, 's': len(script), } if sys.platform == 'win32':
def test_fetch_success(self):
  """A plain fetch returns exactly the stored content."""
  server_ref = isolate_storage.ServerRef('http://example.com', 'default')
  payload = ''.join(str(i) for i in range(1000))
  digest = isolateserver_fake.hash_content(payload)
  self.expected_requests(
      [self.mock_fetch_request(server_ref, digest, payload)])
  storage = isolate_storage.IsolateServer(server_ref)
  self.assertEqual(payload, ''.join(storage.fetch(digest, 0, 0)))
def test_fetch_failure(self):
  """A fetch whose HTTP request yields no payload raises IOError."""
  server_ref = isolate_storage.ServerRef('http://example.com', 'default')
  digest = isolateserver_fake.hash_content('something')
  # Reuse the canned request tuple but substitute None for its response.
  request = self.mock_fetch_request(server_ref, digest)[:-1] + (None,)
  self.expected_requests([request])
  storage = isolate_storage.IsolateServer(server_ref)
  with self.assertRaises(IOError):
    ''.join(storage.fetch(digest, 0, 0))
def test_main_json(self):
  """Runs a command whose (mocked) Popen writes a file into
  ISOLATED_OUTDIR so it is archived back on termination, and requests the
  results be dumped via --json.

  The visible portion ends after building the expected isolated_out
  document; assertions on the --json output follow.
  """
  # Instruct the Popen mock to write a file in ISOLATED_OUTDIR so it will be
  # archived back on termination.
  self.mock(tools, 'disable_buffering', lambda: None)
  sub_cmd = [
      self.ir_dir(u'foo.exe'), u'cmd with space',
      '${ISOLATED_OUTDIR}/out.txt',
  ]
  isolated_in_json = json_dumps({'command': sub_cmd})
  isolated_in_hash = isolateserver_fake.hash_content(isolated_in_json)
  def get_storage(server_ref):
    return StorageFake({isolated_in_hash: isolated_in_json}, server_ref)
  self.mock(isolateserver, 'get_storage', get_storage)
  out = os.path.join(self.tempdir, 'res.json')
  cmd = [
      '--no-log',
      '--isolated', isolated_in_hash,
      '--cache', os.path.join(self.tempdir, 'isolated_cache'),
      '--isolate-server', 'http://localhost:1',
      '--named-cache-root', os.path.join(self.tempdir, 'named_cache'),
      '--json', out,
      '--root-dir', self.tempdir,
  ]
  ret = run_isolated.main(cmd)
  self.assertEqual(0, ret)
  # Replace ${ISOLATED_OUTDIR} with the temporary directory.
  sub_cmd[2] = self.popen_calls[0][0][2]
  self.assertNotIn('ISOLATED_OUTDIR', sub_cmd[2])
  self.assertEqual(
      [(sub_cmd,
        {'cwd': self.ir_dir(), 'detached': True, 'close_fds': True})],
      self.popen_calls)
  isolated_out = {
      'algo': 'sha-1',
      'files': {
          'out.txt': {
              'h': isolateserver_fake.hash_content('generated data\n'),
              's': 15,
              # 0o640 instead of legacy 0640: same value, Python 3
              # compatible, and consistent with the 0o form used elsewhere
              # in this file.
              'm': 0o640,
          },
      },
      'version': isolated_format.ISOLATED_FILE_VERSION,
  }
def help_test_archive(self, cmd_line_prefix):
  """Archives self.tempdir through isolateserver.main() and verifies the
  printed line is '<isolated hash> <tempdir>'."""
  self.mock(isolateserver, 'get_storage', get_storage)
  self.make_tree(CONTENTS)
  isolateserver.main(cmd_line_prefix + [self.tempdir])
  # Rebuild the .isolated document that the archive step should produce.
  entries = {}
  for name, content in CONTENTS.items():
    meta = {
        'h': isolateserver_fake.hash_content(content),
        's': len(content),
    }
    if sys.platform != 'win32':
      # File modes are only recorded on POSIX.
      meta['m'] = 0o600
    entries[name] = meta
  isolated = {
      'algo': 'sha-1',
      'files': entries,
      'version': isolated_format.ISOLATED_FILE_VERSION,
  }
  isolated_data = json.dumps(isolated, sort_keys=True, separators=(',', ':'))
  isolated_hash = isolateserver_fake.hash_content(isolated_data)
  self.checkOutput('%s %s\n' % (isolated_hash, self.tempdir), '')
def test_modified_cwd(self):
  """relative_cwd both rebases the relative command path and sets the
  subprocess's cwd."""
  isolated = json_dumps({
      'command': ['../out/some.exe', 'arg'],
      'relative_cwd': 'some',
  })
  isolated_hash = isolateserver_fake.hash_content(isolated)
  self._run_tha_test(isolated_hash, {isolated_hash: isolated})
  expected_call = (
      [self.ir_dir(u'out', u'some.exe'), 'arg'],
      {'cwd': self.ir_dir('some'), 'detached': True, 'close_fds': True})
  self.assertEqual([expected_call], self.popen_calls)
def test_python_cmd(self):
  """A .py command is launched through a python interpreter."""
  isolated = json_dumps({
      'command': ['../out/cmd.py', 'arg'],
      'relative_cwd': 'some',
  })
  isolated_hash = isolateserver_fake.hash_content(isolated)
  self._run_tha_test(isolated_hash, {isolated_hash: isolated})
  self.assertEqual(1, len(self.popen_calls))
  cmd, kwargs = self.popen_calls[0]
  self.assertEqual(
      {'cwd': self.ir_dir('some'), 'detached': True, 'close_fds': True},
      kwargs)
  # sys.executable is injected, but on macOS the launched path may differ
  # from sys.executable due to symlinks; only check the substring.
  self.assertIn('python', cmd[0])
  self.assertEqual([os.path.join(u'..', 'out', 'cmd.py'), u'arg'], cmd[1:])
def test_run_tha_test_naked(self):
  """Runs a bare-bones .isolated and checks the tree-permission
  bookkeeping calls and the spawned command."""
  isolated = json_dumps({'command': ['invalid', 'command']})
  isolated_hash = isolateserver_fake.hash_content(isolated)
  make_tree_call = self._run_tha_test(
      isolated_hash, {isolated_hash: isolated})
  expected_tree_calls = [
      'make_tree_writeable',
      'make_tree_deleteable',
      'make_tree_deleteable',
      'make_tree_deleteable',
  ]
  self.assertEqual(expected_tree_calls, make_tree_call)
  expected_call = (
      [self.ir_dir(u'invalid'), u'command'],
      {'cwd': self.ir_dir(), 'detached': True, 'close_fds': True})
  self.assertEqual([expected_call], self.popen_calls)
def test_fetch_offset_success(self):
  """Fetching from an offset returns the tail of the stored content."""
  server_ref = isolate_storage.ServerRef('http://example.com', 'default')
  payload = ''.join(str(i) for i in range(1000))
  digest = isolateserver_fake.hash_content(payload)
  offset = 200
  size = len(payload)
  good_content_range_headers = [
      'bytes %d-%d/%d' % (offset, size - 1, size),
      'bytes %d-%d/*' % (offset, size - 1),
  ]
  # NOTE(review): the loop variable is never wired into the mocked
  # response, so both iterations issue identical requests; presumably the
  # headers were meant to be attached to the mock -- confirm intent.
  for _content_range_header in good_content_range_headers:
    self.expected_requests(
        [self.mock_fetch_request(server_ref, digest, payload, offset=offset)])
    storage = isolate_storage.IsolateServer(server_ref)
    self.assertEqual(
        payload[offset:], ''.join(storage.fetch(digest, 0, offset)))
def test_output(self): # Starts a full isolate server mock and have run_tha_test() uploads results # back after the task completed. server = isolateserver_fake.FakeIsolateServer() try: script = ( 'import sys\n' 'open(sys.argv[1], "w").write("bar")\n') script_hash = isolateserver_fake.hash_content(script) isolated = { u'algo': u'sha-1', u'command': [u'cmd.py', u'${ISOLATED_OUTDIR}/foo'], u'files': { u'cmd.py': { u'h': script_hash, u'm': 0700, u's': len(script), }, }, u'version': isolated_format.ISOLATED_FILE_VERSION, } if sys.platform == 'win32':
class RunIsolatedTestRun(RunIsolatedTestBase): # Runs the actual command requested. def test_output(self): # Starts a full isolate server mock and have run_tha_test() uploads results # back after the task completed. server = isolateserver_fake.FakeIsolateServer() try: script = ( 'import sys\n' 'open(sys.argv[1], "w").write("bar")\n') script_hash = isolateserver_fake.hash_content(script) isolated = { u'algo': u'sha-1', u'command': [u'cmd.py', u'${ISOLATED_OUTDIR}/foo'], u'files': { u'cmd.py': { u'h': script_hash, u'm': 0700, u's': len(script), }, }, u'version': isolated_format.ISOLATED_FILE_VERSION, } if sys.platform == 'win32': isolated[u'files'][u'cmd.py'].pop(u'm') isolated_data = json_dumps(isolated) isolated_hash = isolateserver_fake.hash_content(isolated_data) server.add_content('default-store', script) server.add_content('default-store', isolated_data) store = isolateserver.get_storage( isolate_storage.ServerRef(server.url, 'default-store')) self.mock(sys, 'stdout', StringIO.StringIO()) data = run_isolated.TaskData( command=[], relative_cwd=None, extra_args=[], isolated_hash=isolated_hash, storage=store, isolate_cache=local_caching.MemoryContentAddressedCache(), outputs=None, install_named_caches=init_named_caches_stub, leak_temp_dir=False, root_dir=None, hard_timeout=60, grace_period=30, bot_file=None, switch_to_account=False, install_packages_fn=run_isolated.noop_install_packages, use_symlinks=False, env={}, env_prefix={}) ret = run_isolated.run_tha_test(data, None) self.assertEqual(0, ret) # It uploaded back. Assert the store has a new item containing foo. hashes = {isolated_hash, script_hash} output_hash = isolateserver_fake.hash_content('bar') hashes.add(output_hash) isolated = { u'algo': u'sha-1', u'files': { u'foo': { u'h': output_hash, # TODO(maruel): Handle umask. u'm': 0640, u's': 3, }, }, u'version': isolated_format.ISOLATED_FILE_VERSION, }
'file1.txt': file_meta('file1.txt') }, 'read_only': 1, }) CONTENTS['manifest1.isolated'] = json.dumps( {'files': { 'file1.txt': file_meta('file1.txt') }}) CONTENTS['manifest2.isolated'] = json.dumps({ 'files': { 'file2.txt': file_meta('file2.txt') }, 'includes': [ isolateserver_fake.hash_content(CONTENTS['manifest1.isolated']), ], }) CONTENTS['tar_archive.isolated'] = json.dumps({ 'command': ['python', 'archive_files.py'], 'files': { 'archive': { 'h': isolateserver_fake.hash_content(CONTENTS['tar_archive']), 's': len(CONTENTS['tar_archive']), 't': 'tar', }, 'archive_files.py': file_meta('archive_files.py'), }, })
class RunIsolatedTestOutputFiles(RunIsolatedTestBase): # Like RunIsolatedTestRun, but ensures that specific output files # (as opposed to anything in $(ISOLATED_OUTDIR)) are returned. def _run_test(self, isolated, command, extra_args): # Starts a full isolate server mock and have run_tha_test() uploads results # back after the task completed. server = isolateserver_fake.FakeIsolateServer() try: # Output the following structure: # # foo1 # foodir --> foo2_sl (symlink to "foo2_content" file) # bardir --> bar1 # # Create the symlinks only on Linux. script = ( 'import os\n' 'import sys\n' 'open(sys.argv[1], "w").write("foo1")\n' 'bar1_path = os.path.join(sys.argv[3], "bar1")\n' 'open(bar1_path, "w").write("bar1")\n' 'if sys.platform.startswith("linux"):\n' ' foo_realpath = os.path.abspath("foo2_content")\n' ' open(foo_realpath, "w").write("foo2")\n' ' os.symlink(foo_realpath, sys.argv[2])\n' 'else:\n' ' open(sys.argv[2], "w").write("foo2")\n') script_hash = isolateserver_fake.hash_content(script) isolated['files']['cmd.py'] = { 'h': script_hash, 'm': 0700, 's': len(script), } if sys.platform == 'win32': isolated['files']['cmd.py'].pop('m') isolated_data = json_dumps(isolated) isolated_hash = isolateserver_fake.hash_content(isolated_data) server.add_content('default-store', script) server.add_content('default-store', isolated_data) store = isolateserver.get_storage( isolate_storage.ServerRef(server.url, 'default-store')) self.mock(sys, 'stdout', StringIO.StringIO()) data = run_isolated.TaskData( command=command, relative_cwd=None, extra_args=extra_args, isolated_hash=isolated_hash, storage=store, isolate_cache=local_caching.MemoryContentAddressedCache(), outputs=['foo1', 'foodir/foo2_sl', 'bardir/'], install_named_caches=init_named_caches_stub, leak_temp_dir=False, root_dir=None, hard_timeout=60, grace_period=30, bot_file=None, switch_to_account=False, install_packages_fn=run_isolated.noop_install_packages, use_symlinks=False, env={}, env_prefix={}) ret = 
run_isolated.run_tha_test(data, None) self.assertEqual(0, ret) # It uploaded back. Assert the store has a new item containing foo. hashes = {isolated_hash, script_hash} foo1_output_hash = isolateserver_fake.hash_content('foo1') foo2_output_hash = isolateserver_fake.hash_content('foo2') bar1_output_hash = isolateserver_fake.hash_content('bar1') hashes.add(foo1_output_hash) hashes.add(foo2_output_hash) hashes.add(bar1_output_hash) isolated = { u'algo': u'sha-1', u'files': { u'foo1': { u'h': foo1_output_hash, # TODO(maruel): Handle umask. u'm': 0640, u's': 4, }, u'foodir/foo2_sl': { u'h': foo2_output_hash, # TODO(maruel): Handle umask. u'm': 0640, u's': 4, }, u'bardir/bar1': { u'h': bar1_output_hash, # TODO(maruel): Handle umask. u'm': 0640, u's': 4, }, }, u'version': isolated_format.ISOLATED_FILE_VERSION, }
def __init__(self, data, high_priority=False):
  """Wraps raw content as an item whose digest and size derive from it."""
  digest = isolateserver_fake.hash_content(data)
  super(FakeItem, self).__init__(digest, len(data), high_priority)
  # Keep the raw bytes around so tests can serve them back.
  self.data = data
def test_download_isolated_tar_archive(self):
  # Test downloading an isolated tree.
  server_ref = isolate_storage.ServerRef('http://example.com', 'default-gzip')
  # Expected on-disk result after extraction: path -> (content, mode).
  files = {
      os.path.join('a', 'foo'): ('Content', 0o500),
      'b': ('More content', 0o400),
      'c': ('Even more content!', 0o500),
  }
  # Generate a tar archive
  tf = io.BytesIO()
  with tarfile.TarFile(mode='w', fileobj=tf) as tar:
    f1 = tarfile.TarInfo()
    f1.type = tarfile.REGTYPE
    f1.name = 'a/foo'
    f1.size = 7
    f1.mode = 0o570
    tar.addfile(f1, io.BytesIO('Content'))
    f2 = tarfile.TarInfo()
    f2.type = tarfile.REGTYPE
    f2.name = 'b'
    f2.size = 12
    f2.mode = 0o666
    tar.addfile(f2, io.BytesIO('More content'))
  archive = tf.getvalue()
  # The .isolated maps 'archive1' (type 'tar') to the archive blob and 'c'
  # to a regular file.
  isolated = {
      'command': ['Absurb', 'command'],
      'relative_cwd': 'a',
      'files': {
          'archive1': {
              'h': isolateserver_fake.hash_content(archive),
              's': len(archive),
              't': 'tar',
          },
          'c': {
              'h': isolateserver_fake.hash_content(files['c'][0]),
              's': len(files['c'][0]),
          },
      },
      'read_only': 1,
      'version': isolated_format.ISOLATED_FILE_VERSION,
  }
  isolated_data = json.dumps(isolated, sort_keys=True, separators=(',', ':'))
  isolated_hash = isolateserver_fake.hash_content(isolated_data)
  # (digest, content) pairs the mocked server must serve.
  requests = [
      (isolated['files']['archive1']['h'], archive),
      (isolated['files']['c']['h'], files['c'][0]),
  ]
  requests.append((isolated_hash, isolated_data))
  # Expand each pair into the exact /retrieve RPC with a zlib-compressed,
  # base64-encoded inline response body.
  requests = [
      (
          '%s/_ah/api/isolateservice/v1/retrieve' % server_ref.url,
          {
              'data': {
                  'digest': h.encode('utf-8'),
                  'namespace': {
                      'namespace': 'default-gzip',
                      'digest_hash': 'sha-1',
                      'compression': 'flate',
                  },
                  'offset': 0,
              },
              'read_timeout': 60,
          },
          {'content': base64.b64encode(zlib.compress(v))},
      ) for h, v in requests
  ]
  cmd = [
      'download',
      '--isolate-server', server_ref.url,
      '--namespace', server_ref.namespace,
      '--target', os.path.join(self.tempdir, 'target'),
      '--isolated', isolated_hash,
      '--cache', os.path.join(self.tempdir, 'cache'),
  ]
  self.expected_requests(requests)
  self.assertEqual(0, isolateserver.main(cmd))
  expected = {
      os.path.join(self.tempdir, 'target', k): v for k, v in files.items()
  }
  actual = self._get_actual()
  self.assertEqual(expected, actual)
  expected_stdout = (
      'To run this test please run from the directory %s:\n Absurb command\n'
      % os.path.join(self.tempdir, 'target', 'a'))
  self.checkOutput(expected_stdout, '')
def test_download_isolated_simple(self):
  # Test downloading an isolated tree.
  # It writes files to disk for real.
  server_ref = isolate_storage.ServerRef('http://example.com', 'default-gzip')
  # Raw contents keyed by relative path.
  files = {
      os.path.join('a', 'foo'): 'Content',
      'b': 'More content',
  }
  isolated = {
      'command': ['Absurb', 'command'],
      'relative_cwd': 'a',
      'files': {
          os.path.join('a', 'foo'): {
              'h': isolateserver_fake.hash_content('Content'),
              's': len('Content'),
              'm': 0o700,
          },
          'b': {
              'h': isolateserver_fake.hash_content('More content'),
              's': len('More content'),
              'm': 0o600,
          },
          'c': {
              # 'l' marks a symlink entry pointing at a/foo.
              'l': 'a/foo',
          },
      },
      'read_only': 1,
      'version': isolated_format.ISOLATED_FILE_VERSION,
  }
  isolated_data = json.dumps(isolated, sort_keys=True, separators=(',', ':'))
  isolated_hash = isolateserver_fake.hash_content(isolated_data)
  # (digest, content) pairs the mocked server must serve; the symlink entry
  # has no 'h' so it is skipped.
  requests = [
      (v['h'], files[k]) for k, v in isolated['files'].items() if 'h' in v
  ]
  requests.append((isolated_hash, isolated_data))
  # Expand each pair into the exact /retrieve RPC with a zlib-compressed,
  # base64-encoded inline response body.
  requests = [
      (
          '%s/_ah/api/isolateservice/v1/retrieve' % server_ref.url,
          {
              'data': {
                  'digest': h.encode('utf-8'),
                  'namespace': {
                      'namespace': 'default-gzip',
                      'digest_hash': 'sha-1',
                      'compression': 'flate',
                  },
                  'offset': 0,
              },
              'read_timeout': 60,
          },
          {'content': base64.b64encode(zlib.compress(v))},
      ) for h, v in requests
  ]
  cmd = [
      'download',
      '--isolate-server', server_ref.url,
      '--namespace', server_ref.namespace,
      '--target', os.path.join(self.tempdir, 'target'),
      '--isolated', isolated_hash,
      '--cache', os.path.join(self.tempdir, 'cache'),
  ]
  self.expected_requests(requests)
  self.assertEqual(0, isolateserver.main(cmd))
  # Modes 0o500/0o400 presumably stem from read_only=1 stripping write
  # bits; the symlink 'c' reports its target and mode 0 -- confirm against
  # _get_actual()'s contract.
  expected = {
      os.path.join(self.tempdir, 'target', 'a', 'foo'): ('Content', 0o500),
      os.path.join(self.tempdir, 'target', 'b'): ('More content', 0o400),
      os.path.join(self.tempdir, 'target', 'c'): (u'a/foo', 0),
  }
  actual = self._get_actual()
  self.assertEqual(expected, actual)
  expected_stdout = (
      'To run this test please run from the directory %s:\n Absurb command\n'
      % os.path.join(self.tempdir, 'target', 'a'))
  self.checkOutput(expected_stdout, '')
'file1.txt': file_meta('file1.txt') }, 'read_only': 1, }).encode() CONTENTS['manifest1.isolated'] = json.dumps({ 'files': { 'file1.txt': file_meta('file1.txt') } }).encode() CONTENTS['tar_archive.isolated'] = json.dumps({ 'command': ['python', 'archive_files.py'], 'files': { 'archive': { 'h': isolateserver_fake.hash_content(CONTENTS['tar_archive']), 's': len(CONTENTS['tar_archive']), 't': 'tar', }, 'archive_files.py': file_meta('archive_files.py'), }, }).encode() CONTENTS['max_path.isolated'] = json.dumps({ 'command': ['python', 'max_path.py'], 'files': { 'a' * 200 + '/' + 'b' * 200: file_meta('file1.txt'), 'max_path.py': file_meta('max_path.py'), }, }).encode()
'm': 0700, }, 'b': { 'h': isolateserver_fake.hash_content('More content'), 's': len('More content'), 'm': 0600, }, 'c': { 'l': 'a/foo', }, }, 'read_only': 1, 'version': isolated_format.ISOLATED_FILE_VERSION, } isolated_data = json.dumps(isolated, sort_keys=True, separators=(',', ':')) isolated_hash = isolateserver_fake.hash_content(isolated_data) requests = [ (v['h'], files[k]) for k, v in isolated['files'].iteritems() if 'h' in v ] requests.append((isolated_hash, isolated_data)) requests = [ ( '%s/_ah/api/isolateservice/v1/retrieve' % server_ref.url, { 'data': { 'digest': h.encode('utf-8'), 'namespace': { 'namespace': 'default-gzip', 'digest_hash': 'sha-1', 'compression': 'flate',
def file_meta(filename):
  """Returns the .isolated entry (hash 'h' and size 's') for a CONTENTS
  item."""
  content = CONTENTS[filename]
  return {
      'h': isolateserver_fake.hash_content(content),
      's': len(content),
  }
isolated = { u'algo': u'sha-1', u'files': { u'foo': { u'h': output_hash, # TODO(maruel): Handle umask. u'm': 0640, u's': 3, }, }, u'version': isolated_format.ISOLATED_FILE_VERSION, } if sys.platform == 'win32': isolated[u'files'][u'foo'].pop(u'm') uploaded = json_dumps(isolated) uploaded_hash = isolateserver_fake.hash_content(uploaded) hashes.add(uploaded_hash) self.assertEqual(hashes, set(server.contents['default-store'])) expected = ''.join([ '[run_isolated_out_hack]', '{"hash":"%s","namespace":"default-store","storage":%s}' % ( uploaded_hash, json.dumps(server.url)), '[/run_isolated_out_hack]' ]) + '\n' self.assertEqual(expected, sys.stdout.getvalue()) finally: server.close() FILE, LINK, RELATIVE_LINK, DIR = range(4)