def _download_given_files(self, files):
  """Downloads |files| from the server and checks they landed intact."""
  cmd = ['download', '--target', self.rootdir]
  expected = [isolated_format.hash_file(path, hashlib.sha1) for path in files]
  for digest in expected:
    # The destination filename is the digest itself.
    cmd += ['--file', digest, digest]
  self._run(cmd)
  # Re-hash everything that landed in rootdir; it must match exactly the set
  # of requested digests.
  downloaded = [
      isolated_format.hash_file(os.path.join(self.rootdir, name), hashlib.sha1)
      for name in os.listdir(self.rootdir)
  ]
  self.assertEqual(sorted(expected), sorted(downloaded))
def _download_given_files(self, files):
  """Fetches the given files from the server and verifies the result."""
  digests = [isolated_format.hash_file(f, hashlib.sha1) for f in files]
  args = ['download', '--target', self.rootdir]
  for digest in digests:
    args.extend(['--file', digest, digest])
  self._run(args)
  # Assert the files are present: hash whatever ended up in rootdir and
  # compare against what was requested.
  got = sorted(
      isolated_format.hash_file(os.path.join(self.rootdir, name), hashlib.sha1)
      for name in os.listdir(self.rootdir))
  self.assertEqual(sorted(digests), got)
def chromium_save_isolated(isolated, data, path_variables, algo):
  """Writes one or many .isolated files.

  This slightly increases the cold cache cost but greatly reduce the warm cache
  cost by splitting low-churn files off the master .isolated file. It also
  reduces overall isolateserver memcache consumption.

  Arguments:
    isolated: path of the master .isolated file to write.
    data: isolated data dict; mutated in place ('files' entries are moved out,
        'includes' is appended to).
    path_variables: dict of path variables; only 'PRODUCT_DIR' is consulted.
    algo: hashing algorithm constructor, passed to isolated_format.hash_file().

  Returns:
    List of basenames of all the files written.
  """
  slaves = []

  def extract_into_included_isolated(prefix):
    new_slave = {
        'algo': data['algo'],
        'files': {},
        'version': data['version'],
    }
    # Iterate over a snapshot of the keys: the loop pops entries from
    # data['files'], and mutating a dict while iterating its live keys view
    # raises RuntimeError on Python 3.
    for f in list(data['files']):
      if f.startswith(prefix):
        new_slave['files'][f] = data['files'].pop(f)
    if new_slave['files']:
      slaves.append(new_slave)

  # Split test/data/ in its own .isolated file.
  extract_into_included_isolated(os.path.join('test', 'data', ''))

  # Split everything out of PRODUCT_DIR in its own .isolated file.
  if path_variables.get('PRODUCT_DIR'):
    extract_into_included_isolated(path_variables['PRODUCT_DIR'])

  files = []
  for index, f in enumerate(slaves):
    slavepath = isolated[:-len('.isolated')] + '.%d.isolated' % index
    tools.write_json(slavepath, f, True)
    data.setdefault('includes', []).append(
        isolated_format.hash_file(slavepath, algo))
    files.append(os.path.basename(slavepath))

  files.extend(isolated_format.save_isolated(isolated, data))
  return files
def isolated_to_hash(arg, algo):
  """Archives a .isolated file if needed.

  Returns the file hash to trigger and a bool specifying if it was a file
  (True) or a hash (False).
  """
  if not arg.endswith('.isolated'):
    # Not a file path; it must be a bare hash.
    if isolated_format.is_valid_hash(arg, algo):
      return arg, False
    on_error.report('Invalid hash %s' % arg)
    return None, False
  digest = isolated_format.hash_file(arg, algo)
  if not digest:
    on_error.report('Archival failure %s' % arg)
    return None, True
  return digest, True
def isolated_archive(isolate_server, namespace, isolated, algo, verbose):
  """Archives a .isolated and all the dependencies on the Isolate Server.

  Arguments:
    isolate_server: URL of the isolate server to upload to.
    namespace: namespace to upload under.
    isolated: path to the .isolated file to archive.
    algo: hashing algorithm constructor used to hash the .isolated file.
    verbose: integer verbosity level; repeated as that many '--verbose' flags.

  Returns:
    Hash of the .isolated file on success, None if the subprocess failed.
  """
  logging.info(
      'isolated_archive(%s, %s, %s)', isolate_server, namespace, isolated)
  print('Archiving: %s' % isolated)
  cmd = [
    sys.executable,
    os.path.join(ROOT_DIR, 'isolate.py'),
    'archive',
    '--isolate-server', isolate_server,
    '--namespace', namespace,
    '--isolated', isolated,
  ]
  cmd.extend(['--verbose'] * verbose)
  logging.info(' '.join(cmd))
  # Bug fix: |verbose| used to be passed as the second positional argument of
  # subprocess.call(), where it was silently interpreted as bufsize.
  if subprocess.call(cmd):
    return None
  return isolated_format.hash_file(isolated, algo)
def _gen_files(self, read_only, empty_file, with_time):
  """Returns a dict of files like calling isolate.files_to_metadata() on each
  file.

  Arguments:
  - read_only: Mark all the 'm' modes without the writeable bit.
  - empty_file: Add a specific empty file (size 0).
  - with_time: Include 't' timestamps. For saved state .state files.

  Returns:
    Dict mapping unicode relative paths to metadata dicts with keys among
    's' (size), 'm' (mode), 't' (mtime), 'l' (link target), 'h' (hash).
  """
  root_dir = self.isolate_dir
  if RELATIVE_CWD[self.case()] == '.':
    # The test case runs from a nested directory; resolve paths from there.
    root_dir = os.path.join(root_dir, 'tests', 'isolate')
  # Seed one (initially empty) metadata dict per expected dependency.
  files = {unicode(f): {} for f in DEPENDENCIES[self.case()][1]}
  for relfile, v in files.iteritems():
    filepath = os.path.join(root_dir, relfile)
    # lstat() so symlinks are described themselves, not their target.
    filestats = os.lstat(filepath)
    is_link = stat.S_ISLNK(filestats.st_mode)
    if not is_link:
      v[u's'] = int(filestats.st_size)
      if sys.platform != 'win32':
        v[u'm'] = _fix_file_mode(relfile, read_only)
    if with_time:
      # Used to skip recalculating the hash. Use the most recent update
      # time.
      v[u't'] = int(round(filestats.st_mtime))
    if is_link:
      v[u'l'] = os.readlink(filepath)  # pylint: disable=E1101
    else:
      # Upgrade the value to unicode so diffing the structure in case of
      # test failure is easier, since the basestring type must match,
      # str!=unicode.
      v[u'h'] = unicode(isolated_format.hash_file(filepath, ALGO))
  if empty_file:
    # Override the metadata of the designated empty file: null hash,
    # read-only mode, zero size, and no timestamp.
    item = files[empty_file]
    item['h'] = unicode(HASH_NULL)
    if sys.platform != 'win32':
      item['m'] = 0400
    item['s'] = 0
    if with_time:
      item.pop('t', None)
  return files
def files_to_metadata(self, subdir, collapse_symlinks):
  """Updates self.saved_state.files with the files' mode and hash.

  If |subdir| is specified, filters to a subdirectory. The resulting .isolated
  file is tainted.

  See isolated_format.file_to_metadata() for more information.
  """
  # sorted() materializes the keys up front, so popping entries below is safe.
  for infile in sorted(self.saved_state.files):
    if subdir and not infile.startswith(subdir):
      # Outside the requested subdirectory: drop it.
      self.saved_state.files.pop(infile)
      continue
    filepath = os.path.join(self.root_dir, infile)
    # This code used to try to reuse the previous data if possible in
    # saved_state. This performance optimization is not done anymore. This
    # code is going away soon and shouldn't be used in new code.
    meta = isolated_format.file_to_metadata(filepath, collapse_symlinks)
    if 'l' not in meta:
      # Not a symlink, so a content hash is needed.
      meta['h'] = isolated_format.hash_file(filepath, self.saved_state.algo)
    self.saved_state.files[infile] = meta
def chromium_save_isolated(isolated, data, path_variables, algo):
  """Writes one or many .isolated files.

  This slightly increases the cold cache cost but greatly reduce the warm cache
  cost by splitting low-churn files off the master .isolated file. It also
  reduces overall isolateserver memcache consumption.

  Arguments:
    isolated: path of the master .isolated file to write.
    data: isolated data dict; mutated in place ('files' entries are moved out,
        'includes' is appended to).
    path_variables: dict of path variables; only 'PRODUCT_DIR' is consulted.
    algo: hashing algorithm constructor, passed to isolated_format.hash_file().

  Returns:
    List of basenames of all the files written.
  """
  slaves = []

  def extract_into_included_isolated(prefix):
    new_slave = {
        'algo': data['algo'],
        'files': {},
        'version': data['version'],
    }
    # Iterate over a snapshot of the keys: the loop pops entries from
    # data['files'], and mutating a dict while iterating its live keys view
    # raises RuntimeError on Python 3.
    for f in list(data['files']):
      if f.startswith(prefix):
        new_slave['files'][f] = data['files'].pop(f)
    if new_slave['files']:
      slaves.append(new_slave)

  # Split test/data/ in its own .isolated file.
  extract_into_included_isolated(os.path.join('test', 'data', ''))

  # Split everything out of PRODUCT_DIR in its own .isolated file.
  if path_variables.get('PRODUCT_DIR'):
    extract_into_included_isolated(path_variables['PRODUCT_DIR'])

  files = []
  for index, f in enumerate(slaves):
    slavepath = isolated[:-len('.isolated')] + '.%d.isolated' % index
    tools.write_json(slavepath, f, True)
    data.setdefault('includes', []).append(
        isolated_format.hash_file(slavepath, algo))
    files.append(os.path.basename(slavepath))

  files.extend(isolated_format.save_isolated(isolated, data))
  return files
def chromium_save_isolated(isolated, data, path_variables, algo):
  """Writes one or many .isolated files.

  This slightly increases the cold cache cost but greatly reduce the warm cache
  cost by splitting low-churn files off the master .isolated file. It also
  reduces overall isolateserver memcache consumption.

  Arguments:
    isolated: path of the master .isolated file to write.
    data: isolated data dict; mutated in place ('files' entries are moved out,
        'includes' is appended to).
    path_variables: dict of path variables; only 'PRODUCT_DIR' is consulted.
    algo: hashing algorithm constructor, passed to isolated_format.hash_file().

  Returns:
    List of basenames of all the files written.
  """
  slaves = []

  def extract_into_included_isolated(prefix):
    new_slave = {
        'algo': data['algo'],
        'files': {},
        'version': data['version'],
    }
    # Iterate over a snapshot of the keys: the loop pops entries from
    # data['files'], and mutating a dict while iterating its live keys view
    # raises RuntimeError on Python 3.
    for f in list(data['files']):
      if f.startswith(prefix):
        new_slave['files'][f] = data['files'].pop(f)
    if new_slave['files']:
      slaves.append(new_slave)

  # Split test/data/ in its own .isolated file.
  extract_into_included_isolated(os.path.join('test', 'data', ''))

  # Split everything out of PRODUCT_DIR in its own .isolated file.
  if path_variables.get('PRODUCT_DIR'):
    extract_into_included_isolated(path_variables['PRODUCT_DIR'])

  files = []
  for index, f in enumerate(slaves):
    slavepath = isolated[:-len('.isolated')] + '.%d.isolated' % index
    tools.write_json(slavepath, f, True)
    data.setdefault('includes', []).append(
        isolated_format.hash_file(slavepath, algo))
    files.append(os.path.basename(slavepath))

  files.extend(isolated_format.save_isolated(isolated, data))
  return files
def test_file_to_metadata_path_case_collapse(self):
  # Ensure setting the collapse_symlink option doesn't include the symlinks:
  # the link must be described by its target's metadata (empty file), not by
  # an 'l' entry.
  basedir = os.path.join(self.cwd, u'basedir')
  subdir = os.path.join(basedir, u'subdir')
  linkdir = os.path.join(basedir, u'linkdir')
  for directory in (basedir, subdir, linkdir):
    fs.mkdir(directory)
  foo_file = os.path.join(subdir, u'Foo.txt')
  fs.open(foo_file, 'w').close()
  sym_file = os.path.join(basedir, u'linkdir', u'Sym.txt')
  fs.symlink('../subdir/Foo.txt', sym_file)

  actual = isolated_format.file_to_metadata(sym_file, True, True)
  actual['h'] = isolated_format.hash_file(sym_file, ALGO)
  self.assertEqual(
      {
          # SHA-1 of empty string
          'h': 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
          'm': 256,
          's': 0,
      },
      actual)
is_link = stat.S_ISLNK(filestats.st_mode) if not is_link: v[u's'] = int(filestats.st_size) if sys.platform != 'win32': v[u'm'] = _fix_file_mode(relfile, read_only) if with_time: # Used to skip recalculating the hash. Use the most recent update # time. v[u't'] = int(round(filestats.st_mtime)) if is_link: v[u'l'] = os.readlink(filepath) # pylint: disable=E1101 else: # Upgrade the value to unicode so diffing the structure in case of # test failure is easier, since the basestring type must match, # str!=unicode. v[u'h'] = unicode(isolated_format.hash_file(filepath, ALGO)) if empty_file: item = files[empty_file] item['h'] = unicode(HASH_NULL) if sys.platform != 'win32': item['m'] = 0400 item['s'] = 0 if with_time: item.pop('t', None) return files def _expected_isolated(self, args, read_only, empty_file): """Verifies self.isolated contains the expected data.""" expected = { u'algo': u'sha-1',
for relfile, v in files.items(): filepath = os.path.join(root_dir, relfile) filestats = os.lstat(filepath) is_link = stat.S_ISLNK(filestats.st_mode) if not is_link: v[u's'] = int(filestats.st_size) if sys.platform != 'win32': v[u'm'] = _fix_file_mode(relfile, read_only) if is_link: v[u'l'] = os.readlink(filepath) else: # Upgrade the value to unicode so diffing the structure in case of # test failure is easier, since the basestring type must match, # str!=unicode. v[u'h'] = six.text_type( isolated_format.hash_file(filepath, ALGO)) if empty_file: item = files[empty_file] item['h'] = six.text_type(HASH_NULL) if sys.platform != 'win32': item['m'] = 0o400 item['s'] = 0 return files def _expected_isolated(self, args, read_only, empty_file): """Verifies self.isolated contains the expected data.""" expected = { u'algo': u'sha-1', u'files': self._gen_files(read_only, empty_file), u'read_only': 1,
def _is_valid_hash(self, digest):
  """Verify digest with supported hash algos."""
  # True as soon as one supported algorithm hashes the stored file back to
  # |digest|; any() short-circuits like the original early return.
  return any(
      digest == isolated_format.hash_file(self._path(digest), algo)
      for _, algo in isolated_format.SUPPORTED_ALGOS.items())