def __init__(self, storage, use_cached_results=True, cache_results=True,
             print_url=None, check_call=None):
  """Constructor.

  Args:
    storage: A storage layer to read/write from (GSDStorage).
    use_cached_results: If True, reuse cached computation results when they
        are available.
    cache_results: If True, write successful computation results back to
        the cache.
    print_url: Callable taking a URL to print for the build result, or None.
    check_call: Testing hook that intercepts build commands; must match the
        interface of subprocess.check_call. Defaults to log_tools.CheckCall.
  """
  self._storage = storage
  self._directory_storage = directory_storage.DirectoryStorageAdapter(
      storage)
  self._use_cached_results = use_cached_results
  self._cache_results = cache_results
  self._print_url = print_url
  # Substitute the default command runner when no test hook was supplied.
  self._check_call = (
      check_call if check_call is not None else log_tools.CheckCall)
def test_BadWrite(self):
  """A failing gsutil invocation must surface as a GSDStorageError."""
  def failing_call(cmd):
    # Non-zero return code simulates a gsutil failure.
    return 1
  storage = gsd_storage.GSDStorage(gsutil=['mygsutil'],
                                   write_bucket='mybucket',
                                   read_buckets=[],
                                   call=failing_call)
  adapter = directory_storage.DirectoryStorageAdapter(storage)
  # Check that storage exceptions come thru on failure.
  with working_directory.TemporaryWorkingDirectory() as work_dir:
    src_dir = os.path.join(work_dir, 'temp1')
    hashing_tools_test.GenerateTestTree('bad_write', src_dir)
    self.assertRaises(gsd_storage.GSDStorageError,
                      adapter.PutDirectory, src_dir, 'bad')
def test_RecomputeHashMatches(self):
  """Identical outputs must not be re-stored in the output cache.

  Runs the same computation twice with different (but equal-length) inputs
  and verifies that only the in->out hash mapping is written the second
  time, not a duplicate copy of the output data.
  """
  # Test that things don't get stored to the output cache if they exist
  # already.
  with working_directory.TemporaryWorkingDirectory() as work_dir:
    # Setup test data in input0, input1 using memory storage.
    self.GenerateTestData('RecomputeHashMatches', work_dir)
    fs = fake_storage.FakeStorage()
    ds = directory_storage.DirectoryStorageAdapter(storage=fs)
    o = once.Once(storage=fs)
    # Run the computation (compute the length of a file) from input0 to
    # output0.
    o.Run('test', self._input_dirs, self._output_dirs[0],
          [self.FileLength(
              '%(input0)s/in0', '%(output)s/out', cwd=work_dir)])
    # Check that 2 writes have occurred. One to write a mapping from
    # in->out, and one for the output data.
    # NOTE: assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual instead.
    self.assertEqual(2, fs.WriteCount())
    # Run the computation again from input1 to output1.
    # (These should have the same length.)
    o.Run('test', self._input_dirs, self._output_dirs[1],
          [self.FileLength(
              '%(input1)s/in1', '%(output)s/out', cwd=work_dir)])
    # Write count goes up by one as an in->out hash is added,
    # but no new output is stored (as it is the same).
    self.assertEqual(3, fs.WriteCount())
    # Check that the test is still valid:
    #   - in0 and in1 have equal length.
    #   - out0 and out1 have that length in them.
    #   - out0 and out1 agree.
    self.assertEqual(
        str(len(file_tools.ReadFile(self._input_files[0]))),
        file_tools.ReadFile(self._output_files[0]))
    self.assertEqual(
        str(len(file_tools.ReadFile(self._input_files[1]))),
        file_tools.ReadFile(self._output_files[1]))
    self.assertEqual(file_tools.ReadFile(self._output_files[0]),
                     file_tools.ReadFile(self._output_files[1]))
def __init__(self, storage, use_cached_results=True, cache_results=True,
             print_url=None, system_summary=None):
  """Constructor.

  Args:
    storage: A storage layer to read/write from (GSDStorage).
    use_cached_results: Flag indicating that cached computation results
        should be used when possible.
    cache_results: Flag that indicates if successful computations should
        be written to the cache.
    print_url: Function that accepts an URL for printing the build result,
        or None.
    system_summary: Optional value stored on the instance; not interpreted
        here (presumably a description of the host system used in cache
        keys — confirm against callers).
  """
  self._storage = storage
  self._directory_storage = directory_storage.DirectoryStorageAdapter(
      storage)
  self._use_cached_results = use_cached_results
  self._cache_results = cache_results
  self._print_url = print_url
  self._system_summary = system_summary
def setUp(self):
  """Create a directory-storage adapter backed by in-memory fake storage."""
  backing_store = fake_storage.FakeStorage()
  self._dir_storage = directory_storage.DirectoryStorageAdapter(
      backing_store)