def test_save_isolated_good_long_size(self):
  """save_isolated() must pass a file entry whose size exceeds 32 bits
  through to tools.write_json() unmodified.
  """
  recorded = []
  # Capture write_json() calls instead of touching the filesystem.
  self.mock(tools, 'write_json', lambda *args: recorded.append(args))
  data = {
    u'algo': 'sha-1',
    u'files': {
      u'b': {
        u'm': 123,
        u'h': u'0123456789abcdef0123456789abcdef01234567',
        u's': 2181582786,
      }
    },
  }
  isolated_format.save_isolated('foo', data)
  self.assertEqual([('foo', data, True)], recorded)
def chromium_save_isolated(isolated, data, path_variables, algo):
  """Writes one or many .isolated files.

  This slightly increases the cold cache cost but greatly reduce the warm cache
  cost by splitting low-churn files off the master .isolated file. It also
  reduces overall isolateserver memcache consumption.

  Arguments:
    isolated: path of the master .isolated file to write.
    data: isolated file content; mutated in place (entries are moved into
        included files and an 'includes' key may be added).
    path_variables: mapping of path variables; only 'PRODUCT_DIR' is read.
    algo: hashing algorithm passed to isolated_format.hash_file().

  Returns:
    List of file names (basenames) of every .isolated file written, the
    included ones first, then whatever save_isolated() itself reports.
  """
  slaves = []

  def extract_into_included_isolated(prefix):
    # Move every entry whose path starts with |prefix| into its own
    # .isolated payload.
    new_slave = {
      'algo': data['algo'],
      'files': {},
      'version': data['version'],
    }
    # Iterate over a snapshot of the keys: entries are popped from
    # data['files'] while looping, which would raise RuntimeError on a
    # Python 3 dict view (Python 2 .keys() happened to return a copy).
    for f in list(data['files']):
      if f.startswith(prefix):
        new_slave['files'][f] = data['files'].pop(f)
    if new_slave['files']:
      slaves.append(new_slave)

  # Split test/data/ in its own .isolated file.
  extract_into_included_isolated(os.path.join('test', 'data', ''))

  # Split everything out of PRODUCT_DIR in its own .isolated file.
  if path_variables.get('PRODUCT_DIR'):
    extract_into_included_isolated(path_variables['PRODUCT_DIR'])

  files = []
  for index, f in enumerate(slaves):
    slavepath = isolated[:-len('.isolated')] + '.%d.isolated' % index
    tools.write_json(slavepath, f, True)
    # Record the included file's hash so the master file references it.
    data.setdefault('includes', []).append(
        isolated_format.hash_file(slavepath, algo))
    files.append(os.path.basename(slavepath))
  files.extend(isolated_format.save_isolated(isolated, data))
  return files
def chromium_save_isolated(isolated, data, path_variables, algo):
  """Writes one or many .isolated files.

  This slightly increases the cold cache cost but greatly reduce the warm cache
  cost by splitting low-churn files off the master .isolated file. It also
  reduces overall isolateserver memcache consumption.

  Arguments:
    isolated: path of the master .isolated file to write.
    data: isolated file content; mutated in place (entries are moved into
        included files and an 'includes' key may be added).
    path_variables: mapping of path variables; only 'PRODUCT_DIR' is read.
    algo: hashing algorithm passed to isolated_format.hash_file().

  Returns:
    List of file names (basenames) of every .isolated file written, the
    included ones first, then whatever save_isolated() itself reports.
  """
  slaves = []

  def extract_into_included_isolated(prefix):
    # Move every entry whose path starts with |prefix| into its own
    # .isolated payload.
    new_slave = {
      'algo': data['algo'],
      'files': {},
      'version': data['version'],
    }
    # Iterate over a snapshot of the keys: entries are popped from
    # data['files'] while looping, which would raise RuntimeError on a
    # Python 3 dict view (Python 2 .keys() happened to return a copy).
    for f in list(data['files']):
      if f.startswith(prefix):
        new_slave['files'][f] = data['files'].pop(f)
    if new_slave['files']:
      slaves.append(new_slave)

  # Split test/data/ in its own .isolated file.
  extract_into_included_isolated(os.path.join('test', 'data', ''))

  # Split everything out of PRODUCT_DIR in its own .isolated file.
  if path_variables.get('PRODUCT_DIR'):
    extract_into_included_isolated(path_variables['PRODUCT_DIR'])

  files = []
  for index, f in enumerate(slaves):
    slavepath = isolated[:-len('.isolated')] + '.%d.isolated' % index
    tools.write_json(slavepath, f, True)
    # Record the included file's hash so the master file references it.
    data.setdefault('includes', []).append(
        isolated_format.hash_file(slavepath, algo))
    files.append(os.path.basename(slavepath))
  # Fix: the original discarded save_isolated()'s return value, so any file
  # names it reports were silently dropped from the result; the sibling
  # variants of this function extend |files| with it.
  files.extend(isolated_format.save_isolated(isolated, data))
  return files
def chromium_save_isolated(isolated, data, path_variables, algo):
  """Writes one or many .isolated files.

  Low-churn files are split off the master .isolated file into included
  .isolated files. This slightly increases the cold cache cost but greatly
  reduces the warm cache cost, and lowers overall isolateserver memcache
  consumption. Returns the basenames of the files written.
  """
  subsets = []

  def split_out(prefix):
    # Carve every entry living under |prefix| out of the master file list.
    carved = {
      'algo': data['algo'],
      'files': {},
      'version': data['version'],
    }
    for path in data['files'].keys():
      if path.startswith(prefix):
        carved['files'][path] = data['files'].pop(path)
    if carved['files']:
      subsets.append(carved)

  # test/data/ goes into its own .isolated file.
  split_out(os.path.join('test', 'data', ''))

  # Everything under PRODUCT_DIR too, when it is defined.
  product_dir = path_variables.get('PRODUCT_DIR')
  if product_dir:
    split_out(product_dir)

  files = []
  for i, subset in enumerate(subsets):
    subset_path = isolated[:-len('.isolated')] + '.%d.isolated' % i
    tools.write_json(subset_path, subset, True)
    # Reference the included file from the master one by hash.
    data.setdefault('includes', []).append(
        isolated_format.hash_file(subset_path, algo))
    files.append(os.path.basename(subset_path))
  files.extend(isolated_format.save_isolated(isolated, data))
  return files
actual = isolated_format.load_isolated(json.dumps(data), isolateserver_mock.ALGO) expected = gen_data(os.path.sep) self.assertEqual(expected, actual) def test_save_isolated_good_long_size(self): calls = [] self.mock(tools, 'write_json', lambda *x: calls.append(x)) data = { u'algo': 'sha-1', u'files': { u'b': { u'm': 123, u'h': u'0123456789abcdef0123456789abcdef01234567', u's': 2181582786L, } }, } m = isolated_format.save_isolated('foo', data) self.assertEqual([], m) self.assertEqual([('foo', data, True)], calls) if __name__ == '__main__': fix_encoding.fix_encoding() if '-v' in sys.argv: unittest.TestCase.maxDiff = None logging.basicConfig( level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR)) unittest.main()
actual = isolated_format.load_isolated( json.dumps(data), isolateserver_mock.ALGO) expected = gen_data(os.path.sep) self.assertEqual(expected, actual) def test_save_isolated_good_long_size(self): calls = [] self.mock(tools, 'write_json', lambda *x: calls.append(x)) data = { u'algo': 'sha-1', u'files': { u'b': { u'm': 123, u'h': u'0123456789abcdef0123456789abcdef01234567', u's': 2181582786L, } }, } m = isolated_format.save_isolated('foo', data) self.assertEqual([], m) self.assertEqual([('foo', data, True)], calls) if __name__ == '__main__': fix_encoding.fix_encoding() if '-v' in sys.argv: unittest.TestCase.maxDiff = None logging.basicConfig( level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR)) unittest.main()