def test_save_named(self):
  cache = self.get_cache(_get_policies())
  self.assertEqual([], sorted(fs.listdir(cache.cache_dir)))
  self._add_one_item(cache, 2)
  with fs.open(os.path.join(cache.cache_dir, cache.STATE_FILE)) as f:
    old_content = json.load(f)
  # It's immediately saved.
  items = lru.LRUDict.load(os.path.join(cache.cache_dir, cache.STATE_FILE))
  self.assertEqual(1, len(items))
  _key, (v, _timestamp) = items.get_oldest()
  # This depends on the inner format as generated by NamedCache.
  entry_dir_name = v[0]
  self.assertEqual(
      sorted([entry_dir_name, cache.NAMED_DIR, cache.STATE_FILE]),
      sorted(fs.listdir(cache.cache_dir)))
  cache.save()
  self.assertEqual(
      sorted([entry_dir_name, cache.NAMED_DIR, cache.STATE_FILE]),
      sorted(fs.listdir(cache.cache_dir)))
  with fs.open(os.path.join(cache.cache_dir, cache.STATE_FILE)) as f:
    new_content = json.load(f)
  # That's because uninstall() called from self._add_one_item()
  # causes an implicit save(). See uninstall() comments for more details.
  self.assertEqual(new_content, old_content)
def test_putfile(self):
  tmpoutdir = None
  tmpindir = None
  try:
    tmpindir = tempfile.mkdtemp(prefix='isolateserver_test')
    infile = os.path.join(tmpindir, u'in')
    with fs.open(infile, 'wb') as f:
      f.write('data')

    tmpoutdir = tempfile.mkdtemp(prefix='isolateserver_test')

    # Copy as fileobj
    fo = os.path.join(tmpoutdir, u'fo')
    isolateserver.putfile(io.BytesIO('data'), fo)
    self.assertEqual(True, fs.exists(fo))
    self.assertEqual(False, fs.islink(fo))
    self.assertFile(fo, 'data')

    # Copy with partial fileobj
    pfo = os.path.join(tmpoutdir, u'pfo')
    fobj = io.BytesIO('adatab')
    fobj.read(1)  # Read the 'a'
    isolateserver.putfile(fobj, pfo, size=4)
    self.assertEqual(True, fs.exists(pfo))
    self.assertEqual(False, fs.islink(pfo))
    self.assertEqual('b', fobj.read())
    self.assertFile(pfo, 'data')

    # Copy as not readonly
    cp = os.path.join(tmpoutdir, u'cp')
    with fs.open(infile, 'rb') as f:
      isolateserver.putfile(f, cp, file_mode=0o755)
    self.assertEqual(True, fs.exists(cp))
    self.assertEqual(False, fs.islink(cp))
    self.assertFile(cp, 'data')

    # Use hardlink
    hl = os.path.join(tmpoutdir, u'hl')
    with fs.open(infile, 'rb') as f:
      isolateserver.putfile(f, hl, use_symlink=False)
    self.assertEqual(True, fs.exists(hl))
    self.assertEqual(False, fs.islink(hl))
    self.assertFile(hl, 'data')

    # Use symlink
    sl = os.path.join(tmpoutdir, u'sl')
    with fs.open(infile, 'rb') as f:
      isolateserver.putfile(f, sl, use_symlink=True)
    self.assertEqual(True, fs.exists(sl))
    self.assertEqual(True, fs.islink(sl))
    self.assertEqual('data', fs.open(sl, 'rb').read())
    self.assertFile(sl, 'data')
  finally:
    if tmpindir:
      file_path.rmtree(tmpindir)
    if tmpoutdir:
      file_path.rmtree(tmpoutdir)
def put_to_named_cache(self, manager, cache_name, file_name, contents):
  """Puts files into named cache."""
  cache_dir = os.path.join(self.tempdir, 'put_to_named_cache')
  manager.install(cache_dir, cache_name)
  with fs.open(os.path.join(cache_dir, file_name), 'wb') as f:
    f.write(contents)
  manager.uninstall(cache_dir, cache_name)
def prepare_for_archival(options, cwd):
  """Loads the isolated file and creates 'infiles' for archival."""
  complete_state = load_complete_state(options, cwd, options.subdir, False)
  # Make sure that complete_state isn't modified until save_files() is
  # called, because any changes made to it here will propagate to the files
  # created (which is probably not intended).
  complete_state.save_files()

  infiles = complete_state.saved_state.files
  # Add all the .isolated files.
  isolated_hash = []
  isolated_files = [
      options.isolated,
  ] + complete_state.saved_state.child_isolated_files
  for item in isolated_files:
    item_path = os.path.join(
        os.path.dirname(complete_state.isolated_filepath), item)
    # Do not use isolated_format.hash_file() here because the file is
    # likely smallish (under 500kb) and its file size is needed.
    with fs.open(item_path, 'rb') as f:
      content = f.read()
    isolated_hash.append(
        complete_state.saved_state.algo(content).hexdigest())
    isolated_metadata = {
        'h': isolated_hash[-1],
        's': len(content),
        'priority': '0',
    }
    infiles[item_path] = isolated_metadata
  return complete_state, infiles, isolated_hash
def getfileobj(self, digest):
  try:
    f = fs.open(self._path(digest), 'rb')
    with self._lock:
      self._used.append(self._lru[digest])
    return f
  except IOError:
    raise CacheMiss(digest)
def test_cleanup_unexpected(self):
  # cleanup() deletes an unexpected file in the cache directory.
  fs.mkdir(self.cache_dir)
  with fs.open(os.path.join(self.cache_dir, u'junk'), 'w') as f:
    f.write('random')
  cache = self.get_cache(_get_policies())
  self.assertEqual(['junk'], fs.listdir(cache.cache_dir))
  self.assertEqual(True, cache.cleanup())
  self.assertEqual([cache.STATE_FILE], fs.listdir(cache.cache_dir))
def test_rmtree_unicode(self):
  subdir = os.path.join(self.tempdir, 'hi')
  fs.mkdir(subdir)
  filepath = os.path.join(
      subdir, u'\u0627\u0644\u0635\u064A\u0646\u064A\u0629')
  with fs.open(filepath, 'wb') as f:
    f.write('hi')
  # In particular, it fails when the input argument is a str.
  file_path.rmtree(str(subdir))
def test_expand_symlinks_path_case(self):
  # Ensures that the resulting path case is fixed on case insensitive file
  # system.
  fs.symlink('dest', os.path.join(self.cwd, u'link'))
  fs.mkdir(os.path.join(self.cwd, u'Dest'))
  fs.open(os.path.join(self.cwd, u'Dest', u'file.txt'), 'w').close()

  relfile, symlinks = isolated_format._expand_symlinks(self.cwd, u'.')
  self.assertEqual((u'.', []), (relfile, symlinks))

  relfile, symlinks = isolated_format._expand_symlinks(self.cwd, u'link')
  self.assertEqual((u'Dest', [u'link']), (relfile, symlinks))

  relfile, symlinks = isolated_format._expand_symlinks(
      self.cwd, u'link/File.txt')
  self.assertEqual((u'Dest/file.txt', [u'link']), (relfile, symlinks))
def CMDput_bot_config(parser, args):
  """Uploads a new version of bot_config.py."""
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error('Must specify file to upload')
  url = options.swarming + '/_ah/api/swarming/v1/server/put_bot_config'
  path = unicode(os.path.abspath(args[0]))
  with fs.open(path, 'rb') as f:
    content = f.read().decode('utf-8')
  data = net.url_read_json(url, data={'content': content})
  print data
  return 0
def load_included_isolate(isolate_dir, isolate_path):
  if os.path.isabs(isolate_path):
    raise IsolateError(
        'Failed to load configuration; absolute include path \'%s\'' %
        isolate_path)
  included_isolate = os.path.normpath(os.path.join(isolate_dir, isolate_path))
  if sys.platform == 'win32':
    if included_isolate[0].lower() != isolate_dir[0].lower():
      raise IsolateError(
          'Can\'t reference a .isolate file from another drive')
  with fs.open(included_isolate, 'r') as f:
    return load_isolate_as_config(
        os.path.dirname(included_isolate), eval_content(f.read()), None)
def test_symlink_input_absolute_path(self):
  # A symlink outside of the checkout should be treated as a normal
  # directory:
  # .../src
  # .../src/out -> .../tmp/foo
  # .../tmp
  # .../tmp/foo
  src = os.path.join(self.cwd, u'src')
  src_out = os.path.join(src, u'out')
  tmp = os.path.join(self.cwd, u'tmp')
  tmp_foo = os.path.join(tmp, u'foo')
  fs.mkdir(src)
  fs.mkdir(tmp)
  fs.mkdir(tmp_foo)
  # The problem was that it's an absolute path, so it must be considered a
  # normal directory.
  fs.symlink(tmp, src_out)
  fs.open(os.path.join(tmp_foo, u'bar.txt'), 'w').close()
  relfile, symlinks = isolated_format._expand_symlinks(
      src, u'out/foo/bar.txt')
  self.assertEqual((u'out/foo/bar.txt', []), (relfile, symlinks))
def hash_file(filepath, algo):
  """Calculates the hash of a file without reading it all in memory at once.

  |algo| should be one of the hashlib hashing algorithms.
  """
  digest = algo()
  with fs.open(filepath, 'rb') as f:
    while True:
      chunk = f.read(DISK_FILE_CHUNK)
      if not chunk:
        break
      digest.update(chunk)
  return digest.hexdigest()
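# A minimal usage sketch (not from this module): |algo| is the hash
# *constructor*, not an instance, so any hashlib algorithm can be passed.
# The file path below is hypothetical.
import hashlib

print hash_file('/tmp/example/data.bin', hashlib.sha1)    # hex digest string
print hash_file('/tmp/example/data.bin', hashlib.sha256)  # works the same way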
def CMDcollect(parser, args):
  """Retrieves results of one or multiple Swarming tasks by their IDs.

  The result can be in multiple parts if the execution was sharded. It can
  potentially have retries.
  """
  add_collect_options(parser)
  parser.add_option(
      '-j', '--json',
      help='Load the task ids from .json as saved by trigger --dump-json')
  options, args = parser.parse_args(args)
  if not args and not options.json:
    parser.error('Must specify at least one task id or --json.')
  if args and options.json:
    parser.error('Only use one of task id or --json.')

  if options.json:
    options.json = unicode(os.path.abspath(options.json))
    try:
      with fs.open(options.json, 'rb') as f:
        data = json.load(f)
    except (IOError, ValueError):
      parser.error('Failed to open %s' % options.json)
    try:
      tasks = sorted(
          data['tasks'].itervalues(), key=lambda x: x['shard_index'])
      args = [t['task_id'] for t in tasks]
    except (KeyError, TypeError):
      parser.error('Failed to process %s' % options.json)
    if options.timeout is None:
      options.timeout = (
          data['request']['properties']['execution_timeout_secs'] +
          data['request']['expiration_secs'] + 10.)
  else:
    valid = frozenset('0123456789abcdef')
    if any(not valid.issuperset(task_id) for task_id in args):
      parser.error('Task ids are 0-9a-f.')
  try:
    return collect(
        options.swarming, args, options.timeout, options.decorate,
        options.print_status_updates, options.task_summary_json,
        options.task_output_dir, options.perf)
  except Failure:
    on_error.report(None)
    return 1
def test_file_to_metadata_path_case_collapse(self):
  # Ensure setting the collapse_symlink option doesn't include the symlinks.
  basedir = os.path.join(self.cwd, u'basedir')
  fs.mkdir(basedir)
  subdir = os.path.join(basedir, u'subdir')
  fs.mkdir(subdir)
  linkdir = os.path.join(basedir, u'linkdir')
  fs.mkdir(linkdir)
  foo_file = os.path.join(subdir, u'Foo.txt')
  fs.open(foo_file, 'w').close()
  sym_file = os.path.join(basedir, u'linkdir', u'Sym.txt')
  fs.symlink('../subdir/Foo.txt', sym_file)

  actual = isolated_format.file_to_metadata(sym_file, True, True)
  actual['h'] = isolated_format.hash_file(sym_file, ALGO)
  expected = {
      # SHA-1 of empty string
      'h': 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
      'm': 256,
      's': 0,
  }
  self.assertEqual(expected, actual)
def getfileobj(self, digest):
  try:
    f = fs.open(self._path(digest), 'rb')
  except IOError:
    raise CacheMiss(digest)
  with self._lock:
    try:
      self._used.append(self._lru[digest])
    except KeyError:
      # If the digest is not actually in _lru, assume it is a cache miss.
      # Existing file will be overwritten by whoever uses the cache and added
      # to _lru.
      f.close()
      raise CacheMiss(digest)
  return f
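# A minimal caller sketch (assumed, not part of the cache API): CacheMiss
# means "not cached; fetch and re-add". Passing the content as a one-element
# list matches how write() is called in _add_one_item() below; 'fetch' is a
# hypothetical callback returning the raw bytes for a digest.
def read_through(cache, digest, fetch):
  try:
    f = cache.getfileobj(digest)
  except CacheMiss:
    cache.write(digest, [fetch(digest)])
    f = cache.getfileobj(digest)
  try:
    return f.read()
  finally:
    f.close()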
def test_upgrade(self):
  # Make sure upgrading works. This is temporary as eventually all bots will
  # be updated.
  now = time.time()
  fs.mkdir(self.cache_dir)
  fs.mkdir(os.path.join(self.cache_dir, 'f1'))
  with fs.open(os.path.join(self.cache_dir, 'f1', 'hello'), 'wb') as f:
    f.write('world')
  # v1
  old = {
      'version': 2,
      'items': [
          ['cache1', ['f1', now]],
      ],
  }
  c = local_caching.NamedCache
  with fs.open(os.path.join(self.cache_dir, c.STATE_FILE), 'w') as f:
    json.dump(old, f)
  # It automatically upgrades to v2.
  cache = self.get_cache(_get_policies())
  expected = {u'cache1': ((u'f1', len('world')), now)}
  self.assertEqual(expected, dict(cache._lru._items.iteritems()))
  self.assertEqual(
      [u'f1', cache.STATE_FILE], sorted(fs.listdir(cache.cache_dir)))
def file_write(path, content_generator):
  """Writes file content as generated by content_generator.

  Creates the intermediary directory as needed.

  Returns the number of bytes written.

  Meant to be mocked out in unit tests.
  """
  file_path.ensure_tree(os.path.dirname(path))
  total = 0
  with fs.open(path, 'wb') as f:
    for d in content_generator:
      total += len(d)
      f.write(d)
  return total
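# A minimal usage sketch (assumed, not from this module): content_generator
# can be any iterable of byte chunks, so large content can be streamed to
# disk without buffering it all in memory. The output path is hypothetical.
def _gen_chunks():
  for i in xrange(3):
    yield 'chunk-%d\n' % i

# Creates /tmp/example/ as needed and returns the number of bytes written.
assert file_write('/tmp/example/out.bin', _gen_chunks()) == 24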
def test_symlink_absolute(self):
  # A symlink to an absolute path is valid.
  # /dir
  # /dir/file
  # /ld -> /dir
  # /lf -> /ld/file
  dirpath = os.path.join(self.tempdir, 'dir')
  filepath = os.path.join(dirpath, 'file')
  fs.mkdir(dirpath)
  write_content(filepath, b'hello')

  linkfile = os.path.join(self.tempdir, 'lf')
  linkdir = os.path.join(self.tempdir, 'ld')
  dstfile = os.path.join(linkdir, 'file')
  fs.symlink(dstfile, linkfile)
  fs.symlink(dirpath, linkdir)

  self.assertEqual(True, fs.islink(linkfile))
  self.assertEqual(True, fs.islink(linkdir))
  self.assertEqual(dstfile, fs.readlink(linkfile))
  self.assertEqual(dirpath, fs.readlink(linkdir))
  self.assertEqual(['file'], fs.listdir(linkdir))
  # /lf resolves to /dir/file.
  with fs.open(linkfile) as f:
    self.assertEqual('hello', f.read())

  # Ensures that followlinks is respected in walk().
  expected = [
      (self.tempdir, ['dir', 'ld'], ['lf']),
      (dirpath, [], ['file']),
  ]
  actual = [
      (r, sorted(d), sorted(f))
      for r, d, f in sorted(fs.walk(self.tempdir, followlinks=False))
  ]
  self.assertEqual(expected, actual)
  expected = [
      (self.tempdir, ['dir', 'ld'], ['lf']),
      (dirpath, [], ['file']),
      (linkdir, [], ['file']),
  ]
  actual = [
      (r, sorted(d), sorted(f))
      for r, d, f in sorted(fs.walk(self.tempdir, followlinks=True))
  ]
  self.assertEqual(expected, actual)
def test_cycle_twice(self):
  # Ensure that named symlink works.
  cache = self.get_cache(_get_policies())
  dest_dir = os.path.join(self.tempdir, 'dest')
  self.assertEqual(0, cache.install(dest_dir, u'1'))
  with fs.open(os.path.join(dest_dir, u'hi'), 'wb') as f:
    f.write('hello')
  self.assertEqual(5, cache.uninstall(dest_dir, u'1'))
  self.assertEqual(
      [u'1'],
      fs.listdir(os.path.join(cache.cache_dir, cache.NAMED_DIR)))
  self.assertEqual(True, cache.cleanup())
  self.assertEqual(5, cache.install(dest_dir, u'1'))
  self.assertEqual(5, cache.uninstall(dest_dir, u'1'))
  self.assertEqual(
      [u'1'],
      fs.listdir(os.path.join(cache.cache_dir, cache.NAMED_DIR)))
  self.assertEqual(
      [u'hi'],
      fs.listdir(os.path.join(cache.cache_dir, cache.NAMED_DIR, u'1')))
def test_fileobj_path(self):
  # No path on in-memory objects
  self.assertIs(None, isolateserver.fileobj_path(io.BytesIO('hello')))

  # Path on opened files
  thisfile = os.path.join(test_env.TESTS_DIR, 'isolateserver_test.py')
  f = fs.open(thisfile)
  result = isolateserver.fileobj_path(f)
  self.assertIsInstance(result, six.text_type)
  self.assertSequenceEqual(result, thisfile)

  # Path on temporary files
  tf = tempfile.NamedTemporaryFile()
  result = isolateserver.fileobj_path(tf)
  self.assertIsInstance(result, six.text_type)
  self.assertSequenceEqual(result, tf.name)

  # No path on files which are no longer on the file system
  tf = tempfile.NamedTemporaryFile(delete=False)
  fs.unlink(tf.name.decode(sys.getfilesystemencoding()))
  self.assertIs(None, isolateserver.fileobj_path(tf))
def test_cleanup_unexpected_named(self):
  # cleanup() deletes unexpected symlinks and directories in named/.
  fs.mkdir(self.cache_dir)
  c = local_caching.NamedCache
  fs.mkdir(os.path.join(self.cache_dir, c.NAMED_DIR))
  p = os.path.join(self.cache_dir, c.NAMED_DIR, u'junk_file')
  with fs.open(p, 'w') as f:
    f.write('random')
  fs.mkdir(os.path.join(self.cache_dir, c.NAMED_DIR, u'junk_dir'))
  fs.symlink(
      'invalid_dest',
      os.path.join(self.cache_dir, c.NAMED_DIR, u'junk_link'))

  cache = self.get_cache(_get_policies())
  self.assertEqual([cache.NAMED_DIR], fs.listdir(cache.cache_dir))
  self.assertEqual(
      ['junk_dir', 'junk_file', 'junk_link'],
      sorted(fs.listdir(os.path.join(cache.cache_dir, cache.NAMED_DIR))))
  self.assertEqual(True, cache.cleanup())
  self.assertEqual(
      [cache.NAMED_DIR, cache.STATE_FILE],
      sorted(fs.listdir(cache.cache_dir)))
  self.assertEqual(
      [], fs.listdir(os.path.join(cache.cache_dir, cache.NAMED_DIR)))
def test_load_corrupted_state(self):
  # cleanup() handles a broken state file.
  fs.mkdir(self.cache_dir)
  c = local_caching.NamedCache
  with fs.open(os.path.join(self.cache_dir, c.STATE_FILE), 'w') as f:
    f.write('}}}}')
  fs.makedirs(os.path.join(self.cache_dir, '1'), 0777)

  cache = self.get_cache(_get_policies())
  self._add_one_item(cache, 1)
  self.assertTrue(
      fs.exists(os.path.join(cache.cache_dir, cache.NAMED_DIR, '1')))
  self.assertTrue(
      fs.islink(os.path.join(cache.cache_dir, cache.NAMED_DIR, '1')))
  self.assertEqual([], cache.trim())
  self.assertTrue(
      fs.exists(os.path.join(cache.cache_dir, cache.NAMED_DIR, '1')))
  self.assertTrue(
      fs.islink(os.path.join(cache.cache_dir, cache.NAMED_DIR, '1')))
  self.assertEqual(True, cache.cleanup())
  self.assertEqual(
      sorted([cache.NAMED_DIR, cache.STATE_FILE, cache._lru[u'1'][0]]),
      sorted(fs.listdir(cache.cache_dir)))
def _add_one_item(self, cache, size):
  """Adds one item of |size| bytes in the cache and returns the created name.
  """
  # Don't allow 0 byte items here. This doesn't work for named cache.
  self.assertTrue(size)
  data = _gen_data(size)
  if isinstance(cache, local_caching.ContentAddressedCache):
    # This covers both MemoryContentAddressedCache and
    # DiskContentAddressedCache.
    return cache.write(self._algo(data).hexdigest(), [data])
  elif isinstance(cache, local_caching.NamedCache):
    # In this case, map a named cache, add a file, unmap it.
    dest_dir = os.path.join(self.tempdir, 'dest')
    self.assertFalse(fs.exists(dest_dir))
    name = unicode(size)
    cache.install(dest_dir, name)
    # Put a file in there named 'hello', otherwise it'll stay empty.
    with fs.open(os.path.join(dest_dir, 'hello'), 'wb') as f:
      f.write(data)
    cache.uninstall(dest_dir, name)
    self.assertFalse(fs.exists(dest_dir))
    return name
  self.fail('Unexpected cache type %r' % cache)
  return None
def CMDquery_list(parser, args):
  """Returns list of all the Swarming APIs that can be used with command
  'query'.
  """
  parser.add_option(
      '--json', help='Path to JSON output file (otherwise prints to stdout)')
  options, args = parser.parse_args(args)
  if args:
    parser.error('No argument allowed.')

  try:
    apis = endpoints_api_discovery_apis(options.swarming)
  except APIError as e:
    parser.error(str(e))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
    with fs.open(options.json, 'wb') as f:
      json.dump(apis, f)
  else:
    help_url = (
        'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
        options.swarming)
    for api_id, api in sorted(apis.iteritems()):
      print api_id
      print ' ' + api['description']
      for resource_name, resource in sorted(api['resources'].iteritems()):
        print ''
        for method_name, method in sorted(resource['methods'].iteritems()):
          # Only list the GET ones.
          if method['httpMethod'] != 'GET':
            continue
          print '- %s.%s: %s' % (resource_name, method_name, method['path'])
          print '  ' + method['description']
          print '  %s%s%s' % (help_url, api['servicePath'], method['id'])
  return 0
def write_content(filepath, content):
  with fs.open(filepath, 'wb') as f:
    f.write(content)
def load_isolate(
    self, cwd, isolate_file, path_variables, config_variables,
    extra_variables, blacklist, ignore_broken_items):
  """Updates self.isolated and self.saved_state with information loaded from
  a .isolate file.

  Processes the loaded data, deduces root_dir and relative_cwd.
  """
  # Make sure to not depend on os.getcwd().
  assert os.path.isabs(isolate_file), isolate_file
  isolate_file = file_path.get_native_path_case(isolate_file)
  logging.info(
      'CompleteState.load_isolate(%s, %s, %s, %s, %s, %s)',
      cwd, isolate_file, path_variables, config_variables, extra_variables,
      ignore_broken_items)

  # Config variables are not affected by the paths and must be used to
  # retrieve the paths, so update them first.
  self.saved_state.update_config(config_variables)

  with fs.open(isolate_file, 'r') as f:
    # At that point, variables are not replaced yet in command and infiles.
    # infiles may contain directory entries and is in posix style.
    command, infiles, read_only, isolate_cmd_dir = (
        isolate_format.load_isolate_for_config(
            os.path.dirname(isolate_file), f.read(),
            self.saved_state.config_variables))

  # Processes the variables with the new found relative root. Note that 'cwd'
  # is used when path variables are used.
  path_variables = normalize_path_variables(
      cwd, path_variables, isolate_cmd_dir)
  # Update the rest of the saved state.
  self.saved_state.update(isolate_file, path_variables, extra_variables)

  total_variables = self.saved_state.path_variables.copy()
  total_variables.update(self.saved_state.config_variables)
  total_variables.update(self.saved_state.extra_variables)
  command = [
      isolate_format.eval_variables(i, total_variables) for i in command
  ]

  total_variables = self.saved_state.path_variables.copy()
  total_variables.update(self.saved_state.extra_variables)
  infiles = [
      isolate_format.eval_variables(f, total_variables) for f in infiles
  ]

  # root_dir is automatically determined by the deepest root accessed with
  # the form '../../foo/bar'. Note that path variables must be taken into
  # account too; add them as if they were input files.
  self.saved_state.root_dir = isolate_format.determine_root_dir(
      isolate_cmd_dir, infiles + self.saved_state.path_variables.values())
  # The relative directory is automatically determined by the relative path
  # between root_dir and the directory containing the .isolate file,
  # isolate_base_dir.
  relative_cwd = os.path.relpath(isolate_cmd_dir, self.saved_state.root_dir)
  # Now that we know where the root is, check that the path_variables point
  # inside it.
  for k, v in self.saved_state.path_variables.iteritems():
    dest = os.path.join(isolate_cmd_dir, relative_cwd, v)
    if not file_path.path_starts_with(self.saved_state.root_dir, dest):
      raise isolated_format.MappingError(
          'Path variable %s=%r points outside the inferred root directory '
          '%s; %s' % (k, v, self.saved_state.root_dir, dest))
  # Normalize the files based on self.saved_state.root_dir. It is important
  # to keep the trailing os.path.sep at that step.
  infiles = [
      file_path.relpath(
          file_path.normpath(os.path.join(isolate_cmd_dir, f)),
          self.saved_state.root_dir)
      for f in infiles
  ]
  follow_symlinks = sys.platform != 'win32'
  # Expand the directories by listing each file inside. Up to now, trailing
  # os.path.sep must be kept.
  infiles = isolated_format.expand_directories_and_symlinks(
      self.saved_state.root_dir, infiles, tools.gen_blacklist(blacklist),
      follow_symlinks, ignore_broken_items)

  # Finally, update the new data to be able to generate the foo.isolated
  # file, the file that is used by run_isolated.py.
  self.saved_state.update_isolated(command, infiles, read_only, relative_cwd)
  logging.debug(self)
def assertFile(self, path, contents):
  self.assertTrue(fs.exists(path), 'File %s doesn\'t exist!' % path)
  self.assertMultiLineEqual(contents, fs.open(path, 'rb').read())
def read_file(path):
  with fs.open(path, 'rb') as f:
    return f.read()
def write_file(path, contents):
  with fs.open(path, 'wb') as f:
    f.write(contents)
def load_isolate(self, cwd, isolate_file, path_variables, config_variables,
                 extra_variables, blacklist, ignore_broken_items,
                 collapse_symlinks):
  """Updates self.isolated and self.saved_state with information loaded from
  a .isolate file.

  Processes the loaded data, deduces root_dir and relative_cwd.
  """
  # Make sure to not depend on os.getcwd().
  assert os.path.isabs(isolate_file), isolate_file
  isolate_file = file_path.get_native_path_case(isolate_file)
  logging.info(
      'CompleteState.load_isolate(%s, %s, %s, %s, %s, %s, %s)',
      cwd, isolate_file, path_variables, config_variables, extra_variables,
      ignore_broken_items, collapse_symlinks)

  # Config variables are not affected by the paths and must be used to
  # retrieve the paths, so update them first.
  self.saved_state.update_config(config_variables)

  with fs.open(isolate_file, 'r') as f:
    # At that point, variables are not replaced yet in command and infiles.
    # infiles may contain directory entries and is in posix style.
    command, infiles, read_only, isolate_cmd_dir = (
        isolate_format.load_isolate_for_config(
            os.path.dirname(isolate_file), f.read(),
            self.saved_state.config_variables))

  # Processes the variables with the new found relative root. Note that 'cwd'
  # is used when path variables are used.
  path_variables = normalize_path_variables(
      cwd, path_variables, isolate_cmd_dir)
  # Update the rest of the saved state.
  self.saved_state.update(isolate_file, path_variables, extra_variables)

  total_variables = self.saved_state.path_variables.copy()
  total_variables.update(self.saved_state.config_variables)
  total_variables.update(self.saved_state.extra_variables)
  command = [
      isolate_format.eval_variables(i, total_variables) for i in command
  ]

  total_variables = self.saved_state.path_variables.copy()
  total_variables.update(self.saved_state.extra_variables)
  infiles = [
      isolate_format.eval_variables(f, total_variables) for f in infiles
  ]

  # root_dir is automatically determined by the deepest root accessed with
  # the form '../../foo/bar'. Note that path variables must be taken into
  # account too; add them as if they were input files.
  self.saved_state.root_dir = isolate_format.determine_root_dir(
      isolate_cmd_dir, infiles + self.saved_state.path_variables.values())
  # The relative directory is automatically determined by the relative path
  # between root_dir and the directory containing the .isolate file,
  # isolate_base_dir.
  relative_cwd = os.path.relpath(isolate_cmd_dir, self.saved_state.root_dir)
  # Now that we know where the root is, check that the path_variables point
  # inside it.
  for k, v in self.saved_state.path_variables.iteritems():
    dest = os.path.join(isolate_cmd_dir, relative_cwd, v)
    if not file_path.path_starts_with(self.saved_state.root_dir, dest):
      raise isolated_format.MappingError(
          'Path variable %s=%r points outside the inferred root directory '
          '%s; %s' % (k, v, self.saved_state.root_dir, dest))
  # Normalize the files based on self.saved_state.root_dir. It is important
  # to keep the trailing os.path.sep at that step.
  infiles = [
      file_path.relpath(
          file_path.normpath(os.path.join(isolate_cmd_dir, f)),
          self.saved_state.root_dir)
      for f in infiles
  ]
  follow_symlinks = False
  if not collapse_symlinks:
    follow_symlinks = sys.platform != 'win32'
  # Expand the directories by listing each file inside. Up to now, trailing
  # os.path.sep must be kept.
  infiles = isolated_format.expand_directories_and_symlinks(
      self.saved_state.root_dir, infiles, tools.gen_blacklist(blacklist),
      follow_symlinks, ignore_broken_items)

  # Finally, update the new data to be able to generate the foo.isolated
  # file, the file that is used by run_isolated.py.
  self.saved_state.update_isolated(command, infiles, read_only, relative_cwd)
  logging.debug(self)