def _should_install_sandbox(self):
    """Checks if the sandbox is correctly installed.

    If the last check for update is older than ``CHECK_INTERVAL``,
    checks if the sandbox should be updated.

    Returns ``True`` if the sandbox is missing, incomplete or stale and
    should be (re-)downloaded, ``False`` if the local copy can be used.
    Never raises: any failure during the check is logged, and we fall
    back to keeping an existing local copy if there is one.
    """
    if not os.path.isdir(self.path):
        # No local copy at all.
        return True
    try:
        # Every required fixup must already have been applied,
        # otherwise the sandbox has to be reinstalled.
        fixups_file = os.path.join(self.path, '.fixups_applied')
        if not os.path.exists(fixups_file):
            return True
        with open(fixups_file) as f:
            current_fixups = set(f.read().split())
        if not current_fixups.issuperset(self.required_fixups):
            return True

        # Skip the remote hash check if we checked recently.
        last_check_file = os.path.join(self.path, '.last_check')
        with open(last_check_file, 'rb') as f:
            last_check = int(f.read().decode())
        now_int = int(time.time())
        if last_check + CHECK_INTERVAL > now_int:
            return False

        ft_path = _filetracker_path(self.name)
        ft_client = ft.instance()
        expected_hash = ft_client.file_version(ft_path)
        if not expected_hash:
            raise SandboxError("Server did not return hash for "
                               "the sandbox image '%s'" % self.name)
        expected_hash = str(expected_hash)

        hash_file = os.path.join(self.path, '.hash')
        if not os.path.exists(hash_file):
            return True
        with open(hash_file, 'rb') as f:
            # Decode so the comparison is str-vs-str.  Comparing the
            # raw bytes against ``expected_hash`` (a str) is always
            # unequal on Python 3 and would force a re-download on
            # every single check.
            local_hash = f.read().strip().decode()
        logger.debug("Comparing hashes: %s vs %s.", expected_hash, local_hash)
        if local_hash != expected_hash:
            return True

        # Last check file is updated only after the actual check
        # confirmed that we are up to date.
        self._mark_checked()
        return False
    except Exception:
        logger.warning("Failed to check if sandbox is up-to-date",
                       exc_info=True)
        if os.path.isdir(self.path):
            # If something fails, but we have the sandbox itself, better
            # do not try to download it again.
            self._mark_checked()
            return False
        return True
def _get(self):
    # Makes sure the sandbox named ``self.name`` is available locally,
    # downloading it under an exclusive lock if needed.
    #
    # NOTE(review): this variant uses Python 2-only syntax
    # (``except Exception, e``) and ``urllib2``; it will not parse on
    # Python 3.
    #
    # NOTE(review): unlike the other ``_get`` variants in this file,
    # this one ends right after downloading the archive: it never
    # extracts it, never writes ``.hash`` and never re-acquires the
    # shared lock.  Looks truncated/incomplete — verify before reuse.
    name = self.name
    path = self.path
    logger.debug("Sandbox '%s' requested", name)
    # Fast path: if the sandbox is already installed, keep the shared
    # lock for the lifetime of this object and return.
    self.lock.lock_shared()
    if not self._should_install_sandbox():
        # Sandbox is ready, so we return and *maintain* the lock
        # for the lifetime of this object.
        return
    # Upgrade to an exclusive lock (must release the shared lock first)
    # and re-check: another process may have installed the sandbox while
    # we were waiting.
    self.lock.unlock()
    self.lock.lock_exclusive()
    if not self._should_install_sandbox():
        self.lock.lock_shared()
        return
    logger.info("Downloading sandbox '%s' ...", name)
    if os.path.exists(path):
        # NOTE(review): removing the whole directory also removes any
        # lock file kept inside it, which can let two processes download
        # the same sandbox simultaneously — confirm.
        shutil.rmtree(path)
    archive_path = path + '.tar.gz'
    try:
        # Primary source: the filetracker server.
        ft_path = _filetracker_path(name)
        ft_client = ft.instance()
        vname = ft_client.get_file(ft_path, archive_path)
        version = ft_client.file_version(vname)
    except Exception, e:
        logger.warning("Failed to download sandbox from filetracker",
                       exc_info=True)
        if SANDBOXES_URL:
            # Fallback source: a plain HTTP mirror.
            url = SANDBOXES_URL + '/' + _urllib_path(name)
            logger.info(" trying url: %s", url)
            local_f = open(archive_path, 'wb')
            try:
                http_f = urllib2.urlopen(url)
                shutil.copyfileobj(http_f, local_f)
                local_f.close()
            except:
                # Remove the partial download before re-raising.
                os.unlink(archive_path)
                raise
            # Use the HTTP Last-Modified header as the version stamp.
            version = self._parse_last_modified(http_f)
        else:
            raise SandboxError("Could not download sandbox '%s'"
                               % (name,))
def _should_install_sandbox(self):
    """Checks if the sandbox should be (re-)downloaded.

    Returns ``True`` when the sandbox is missing or its hash differs
    from the one the server reports, ``False`` when the local copy can
    be used.  The remote hash is consulted at most once per
    ``CHECK_INTERVAL`` seconds.  Never raises: failures are logged and
    an existing local copy is kept if there is one.
    """
    try:
        # Skip the remote hash check if we checked recently.
        last_check_file = os.path.join(self.path, '.last_check')
        with open(last_check_file) as f:
            last_check = int(f.read())
        now_int = int(time.time())
        if last_check + CHECK_INTERVAL > now_int:
            return False

        ft_path = _filetracker_path(self.name)
        ft_client = ft.instance()
        expected_hash = ft_client.file_version(ft_path)
        if not expected_hash:
            raise SandboxError("Server did not return hash for "
                               "the sandbox image '%s'" % self.name)

        hash_file = os.path.join(self.path, '.hash')
        if not os.path.exists(hash_file):
            return True
        with open(hash_file, 'rb') as f:
            # Normalize both sides to str: the file contents are bytes
            # and ``file_version`` may return a non-str value; without
            # these conversions the comparison can never succeed on
            # Python 3, forcing a re-download on every check.
            local_hash = f.read().strip().decode()
        if local_hash != str(expected_hash):
            return True

        # Last check file is updated only after the actual check
        # confirmed that we are up to date.
        self._mark_checked()
        return False
    except Exception:
        logger.warning("Failed to check if sandbox is up-to-date",
                       exc_info=True)
        if os.path.isdir(self.path):
            # If something fails, but we have the sandbox itself, better
            # do not try to download it again.
            self._mark_checked()
            return False
        return True
def _get(self):
    """Downloads and installs the sandbox if it is not installed
    correctly or should be updated.

    On return the sandbox is installed and a *shared* lock on it is
    held for the lifetime of this object.  If installation fails, the
    lock is released and the exception propagates.

    This function has a bug which causes that simultaneous download of
    the same sandbox is possible and this causes an error.  The bug is
    that we are deleting the whole sandbox directory before we start
    downloading the sandbox, thus we are deleting the `.lock` file
    inside that directory.
    """
    name = self.name
    path = self.path

    logger.debug("Sandbox '%s' requested", name)
    # Fast path: already installed — keep the shared lock and return.
    self.lock.lock_shared()
    if not self._should_install_sandbox():
        # Sandbox is ready, so we return and *maintain* the lock
        # for the lifetime of this object.
        return

    # Upgrade to an exclusive lock and re-check: another process may
    # have installed the sandbox while we waited.
    self.lock.unlock()
    self.lock.lock_exclusive()
    if not self._should_install_sandbox():
        self.lock.lock_shared()
        return

    try:
        logger.info("Downloading sandbox '%s' ...", name)
        if os.path.exists(path):
            rmtree(path)
        archive_path = path + '.tar.gz'
        try:
            # Primary source: the filetracker server.
            ft_path = _filetracker_path(name)
            ft_client = ft.instance()
            vname = ft_client.get_file(ft_path, archive_path)
            version = ft_client.file_version(vname)
        except Exception:
            logger.warning("Failed to download sandbox from filetracker",
                           exc_info=True)
            if SANDBOXES_URL:
                # Fallback source: a plain HTTP mirror.
                url = SANDBOXES_URL + '/' + _urllib_path(name)
                logger.info(" trying url: %s", url)
                local_f = open(archive_path, 'wb')
                try:
                    http_f = six.moves.urllib.request.urlopen(url)
                    shutil.copyfileobj(http_f, local_f)
                except BaseException:
                    # Close the handle and remove the partial download
                    # before re-raising (the original leaked ``local_f``
                    # on this path).
                    local_f.close()
                    os.unlink(archive_path)
                    raise
                local_f.close()
                # Use the HTTP Last-Modified header as the version stamp.
                version = self._parse_last_modified(http_f)
            else:
                raise SandboxError("Could not download sandbox '%s'"
                                   % (name,))

        logger.info(" extracting ...")
        # NOTE: ``extractall`` trusts the archive; a malicious sandbox
        # image could write outside SANDBOXES_BASEDIR (path traversal).
        # Images come from our own server, but flagging for review.
        with tarfile.open(archive_path, 'r') as tar:
            tar.extractall(SANDBOXES_BASEDIR)
        os.unlink(archive_path)
        if not os.path.isdir(path):
            raise SandboxError("Downloaded sandbox archive "
                               "did not contain expected directory '%s'" % name)

        self._apply_fixups()

        # ``version`` may be an int; encode so the write also works on
        # Python 3, where a binary-mode file rejects str.
        hash_file = os.path.join(path, '.hash')
        with open(hash_file, 'wb') as hash_f:
            hash_f.write(str(version).encode())
        self._mark_checked()
        logger.info(" done.")
    except BaseException:
        # Release the exclusive lock before propagating, so other
        # processes waiting for this sandbox do not deadlock.
        self.lock.unlock()
        raise
    # Success: downgrade to a shared lock held for our lifetime.
    self.lock.lock_shared()
def _get(self):
    # Makes sure the sandbox named ``self.name`` is available locally,
    # downloading and extracting it under an exclusive lock if needed.
    #
    # NOTE(review): this variant uses Python 2-only syntax
    # (``except Exception, e``) and ``urllib2``; it will not parse on
    # Python 3.
    #
    # NOTE(review): if anything below raises, the exclusive lock is
    # never released; and even on success the lock is never downgraded
    # back to shared at the end.  Whether callers expect to keep the
    # exclusive lock here is not visible from this file — confirm.
    name = self.name
    path = self.path
    logger.debug("Sandbox '%s' requested", name)
    # Fast path: if the sandbox is already installed, keep the shared
    # lock for the lifetime of this object and return.
    self.lock.lock_shared()
    if not self._should_install_sandbox():
        # Sandbox is ready, so we return and *maintain* the lock
        # for the lifetime of this object.
        return
    # Upgrade to an exclusive lock and re-check: another process may
    # have installed the sandbox while we were waiting.
    self.lock.unlock()
    self.lock.lock_exclusive()
    if not self._should_install_sandbox():
        self.lock.lock_shared()
        return
    try:
        logger.info("Downloading sandbox '%s' ...", name)
        if os.path.exists(path):
            # NOTE(review): removing the whole directory also removes
            # any lock file kept inside it, which can let two processes
            # download the same sandbox simultaneously — confirm.
            rmtree(path)
        archive_path = path + '.tar.gz'
        try:
            # Primary source: the filetracker server.
            ft_path = _filetracker_path(name)
            ft_client = ft.instance()
            vname = ft_client.get_file(ft_path, archive_path)
            version = ft_client.file_version(vname)
        except Exception, e:
            logger.warning("Failed to download sandbox from filetracker",
                           exc_info=True)
            if SANDBOXES_URL:
                # Fallback source: a plain HTTP mirror.
                url = SANDBOXES_URL + '/' + _urllib_path(name)
                logger.info(" trying url: %s", url)
                local_f = open(archive_path, 'wb')
                try:
                    http_f = urllib2.urlopen(url)
                    shutil.copyfileobj(http_f, local_f)
                    local_f.close()
                except:
                    # Remove the partial download before re-raising.
                    os.unlink(archive_path)
                    raise
                # Use the HTTP Last-Modified header as the version
                # stamp.
                version = self._parse_last_modified(http_f)
            else:
                raise SandboxError("Could not download sandbox '%s'"
                                   % (name,))
        logger.info(" extracting ...")
        tar = tarfile.open(archive_path, 'r')
        tar.extractall(SANDBOXES_BASEDIR)
        os.unlink(archive_path)
        if not os.path.isdir(path):
            raise SandboxError("Downloaded sandbox archive "
                               "did not contain expected directory '%s'" % name)
        self._apply_fixups()
        # Record the installed version for later up-to-date checks.
        hash_file = os.path.join(path, '.hash')
        open(hash_file, 'wb').write(str(version))
        self._mark_checked()
        logger.info(" done.")