def _load(self):
    """Loads the state of the cache from its json state file.

    Ensures the cache directory exists (making the existing tree read-only
    when it does), restores the LRU bookkeeping from |self.state_file|,
    reconciles it against the files actually present in |self.cache_dir|,
    and finally trims the cache to its configured limits.

    Must be called with |self._lock| held.
    """
    self._lock.assert_locked()
    if not os.path.isdir(self.cache_dir):
        os.makedirs(self.cache_dir)
    else:
        # Make sure the cache is read-only.
        # TODO(maruel): Calculate the cost and optimize the performance
        # accordingly.
        make_tree_read_only(self.cache_dir)

    # Load state of the cache.
    if os.path.isfile(self.state_file):
        try:
            self._lru = lru.LRUDict.load(self.state_file)
        except ValueError as err:
            # Lazy %-args (consistent with the logging.warning calls below)
            # instead of eager string formatting.
            logging.error('Failed to load cache state: %s', err)
            # Don't want to keep broken state file.
            try_remove(self.state_file)

    # Ensure that all files listed in the state still exist and add new ones.
    previous = self._lru.keys_set()
    unknown = []
    for filename in os.listdir(self.cache_dir):
        if filename == self.STATE_FILE:
            continue
        if filename in previous:
            previous.remove(filename)
            continue
        # An untracked file that doesn't even look like a content hash:
        # it cannot belong to the cache, so delete it outright.
        if not isolateserver.is_valid_hash(filename, self.hash_algo):
            logging.warning('Removing unknown file %s from cache', filename)
            try_remove(self._path(filename))
            continue
        # File that's not referenced in 'state.json'.
        # TODO(vadimsh): Verify its SHA1 matches file name.
        logging.warning('Adding unknown file %s to cache', filename)
        unknown.append(filename)

    if unknown:
        # Add as oldest files. They will be deleted eventually if not accessed.
        self._add_oldest_list(unknown)
        logging.warning('Added back %d unknown files', len(unknown))

    if previous:
        # Entries tracked in the state file whose backing files vanished.
        logging.warning('Removed %d lost files', len(previous))
        for filename in previous:
            self._lru.pop(filename)
    self._trim()
def _load(self):
    """Restores cache state from the json state file on disk.

    Creates the cache directory when missing (otherwise marks the existing
    tree read-only), reloads the LRU bookkeeping, reconciles it with the
    directory's actual contents, then trims the cache.

    Requires |self._lock| to be held.
    """
    self._lock.assert_locked()

    if os.path.isdir(self.cache_dir):
        # Make sure the cache is read-only.
        # TODO(maruel): Calculate the cost and optimize the performance
        # accordingly.
        make_tree_read_only(self.cache_dir)
    else:
        os.makedirs(self.cache_dir)

    # Load state of the cache.
    if os.path.isfile(self.state_file):
        try:
            self._lru = lru.LRUDict.load(self.state_file)
        except ValueError as err:
            logging.error('Failed to load cache state: %s' % (err,))
            # Don't want to keep broken state file.
            try_remove(self.state_file)

    # Ensure that all files listed in the state still exist and add new ones.
    tracked = self._lru.keys_set()
    adopted = []
    for entry in os.listdir(self.cache_dir):
        if entry == self.STATE_FILE:
            continue
        if entry in tracked:
            tracked.remove(entry)
        elif not isolateserver.is_valid_hash(entry, self.algo):
            # An untracked file whose name isn't a valid hash; purge it.
            logging.warning('Removing unknown file %s from cache', entry)
            try_remove(self._path(entry))
        else:
            # File that's not referenced in 'state.json'.
            # TODO(vadimsh): Verify its SHA1 matches file name.
            logging.warning('Adding unknown file %s to cache', entry)
            adopted.append(entry)

    if adopted:
        # Add as oldest files. They will be deleted eventually if not accessed.
        self._add_oldest_list(adopted)
        logging.warning('Added back %d unknown files', len(adopted))
    if tracked:
        # Whatever remains was tracked but no longer exists on disk.
        logging.warning('Removed %d lost files', len(tracked))
        for entry in tracked:
            self._lru.pop(entry)
    self._trim()
def isolated_to_hash(isolate_server, namespace, arg, algo, verbose):
    """Archives a .isolated file if needed.

    Returns the file hash to trigger and a bool specifying if it was a file
    (True) or a hash (False).
    """
    if not arg.endswith('.isolated'):
        # |arg| should already be a content hash; validate it.
        if isolateserver.is_valid_hash(arg, algo):
            return arg, False
        tools.report_error('Invalid hash %s' % arg)
        return None, False

    # |arg| names a .isolated file that must be uploaded first.
    file_hash = archive(isolate_server, namespace, arg, algo, verbose)
    if file_hash:
        return file_hash, True
    tools.report_error('Archival failure %s' % arg)
    return None, True
def isolated_to_hash(isolate_server, namespace, arg, algo, verbose):
    """Archives a .isolated file if needed.

    Returns:
      A (hash, was_file) pair: the hash to trigger (None on failure) and
      True when |arg| named a .isolated file, False when it was a raw hash.
    """
    was_file = arg.endswith('.isolated')
    if was_file:
        # Upload the .isolated file and use the resulting hash.
        file_hash = archive(isolate_server, namespace, arg, algo, verbose)
        if not file_hash:
            on_error.report('Archival failure %s' % arg)
            return None, was_file
        return file_hash, was_file

    if isolateserver.is_valid_hash(arg, algo):
        return arg, was_file
    on_error.report('Invalid hash %s' % arg)
    return None, was_file
def process_manifest(
        file_hash_or_isolated, test_name, shards, test_filter, slave_os,
        working_dir, isolate_server, swarming, verbose, profile, priority,
        algo):
    """Process the manifest file and send off the swarm test request.

    Optionally archives an .isolated file.

    Returns 0 on success, 1 on any failure (errors are printed to stderr).
    NOTE(review): Python 2 only — uses `print >> sys.stderr` statements.
    """
    # Resolve |file_hash_or_isolated| to a content hash: archive the file if
    # a .isolated path was given, otherwise accept a valid hash as-is.
    if file_hash_or_isolated.endswith('.isolated'):
        file_hash = archive(
            file_hash_or_isolated, isolate_server, slave_os, algo, verbose)
        if not file_hash:
            print >> sys.stderr, 'Archival failure %s' % file_hash_or_isolated
            return 1
    elif isolateserver.is_valid_hash(file_hash_or_isolated, algo):
        file_hash = file_hash_or_isolated
    else:
        print >> sys.stderr, 'Invalid hash %s' % file_hash_or_isolated
        return 1

    try:
        # Manifest raises ValueError on bad arguments (e.g. unknown OS
        # mapping) — presumably; confirm against the Manifest constructor.
        manifest = Manifest(
            file_hash,
            test_name,
            shards,
            test_filter,
            PLATFORM_MAPPING_SWARMING[slave_os],
            working_dir,
            isolate_server,
            verbose,
            profile,
            priority,
            algo)
    except ValueError as e:
        print >> sys.stderr, 'Unable to process %s: %s' % (test_name, e)
        return 1

    chromium_setup(manifest)

    # Zip up relevant files.
    print('Zipping up files...')
    if not manifest.zip_and_upload():
        return 1

    # Send test requests off to swarm.
    print('Sending test requests to swarm.')
    print('Server: %s' % swarming)
    print('Job name: %s' % test_name)
    test_url = swarming + '/test'
    manifest_text = manifest.to_json()
    result = net.url_read(test_url, data={'request': manifest_text})
    if not result:
        print >> sys.stderr, 'Failed to send test for %s\n%s' % (
            test_name, test_url)
        return 1
    try:
        # Only validates that the server replied with well-formed JSON;
        # the parsed value itself is discarded.
        json.loads(result)
    except (ValueError, TypeError) as e:
        print >> sys.stderr, 'Failed to send test for %s' % test_name
        print >> sys.stderr, 'Manifest: %s' % manifest_text
        print >> sys.stderr, 'Bad response: %s' % result
        print >> sys.stderr, str(e)
        return 1
    return 0