def read_version_file(self, build_directory):
    """
    Validate that this PackageDescription from AUTOBUILD_CONFIG_FILE has a
    version_file attribute referencing a readable file. Read it and return
    the contained version, else raise AutobuildError.

    If a legacy AUTOBUILD_CONFIG_FILE contains a version attribute, produce
    a deprecation warning.
    """
    if self.version:
        logger.warn("package_description.version ignored in %s; use version_file instead"
                    % AUTOBUILD_CONFIG_FILE)
    if not self.version_file:
        # should never hit this because caller should have already called
        # check_package_attributes(), but suspenders and belt
        raise common.AutobuildError("Missing version_file key")
    version_file = os.path.join(build_directory, self.version_file)
    try:
        with open(version_file) as vf:
            version = vf.read().strip()
    except IOError as err:
        raise common.AutobuildError("Can't read version_file '%s': %s"
                                    % (self.version_file, err))
    if not version:
        raise common.AutobuildError("version_file '%s' contains no version info"
                                    % self.version_file)
    return version
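# Hedged usage sketch: read_version_file() above expects the build step to have
# written a non-empty, plain-text version string into
# <build_directory>/<version_file>. The temporary directory and the name
# "VERSION.txt" below are invented purely for illustration.
import os
import tempfile

demo_build_dir = tempfile.mkdtemp()
with open(os.path.join(demo_build_dir, "VERSION.txt"), "w") as vf:
    vf.write("1.2.3\n")
# a PackageDescription whose version_file is "VERSION.txt" would now return
# "1.2.3" from read_version_file(demo_build_dir); this reproduces the same read:
with open(os.path.join(demo_build_dir, "VERSION.txt")) as vf:
    assert vf.read().strip() == "1.2.3"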
def __load(self, path):
    # circular imports, sorry, must import update locally
    import update
    if os.path.isabs(path):
        self.path = path
    else:
        abs_path = os.path.abspath(path)
        found_path = common.search_up_for_file(abs_path)
        if found_path is not None:
            self.path = found_path
        else:
            self.path = abs_path
    if os.path.isfile(self.path):
        autobuild_xml = file(self.path, 'rb').read()
        if not autobuild_xml:
            logger.warn("Configuration file '%s' is empty" % self.path)
            return
        try:
            saved_data = llsd.parse(autobuild_xml)
        except llsd.LLSDParseError:
            raise common.AutobuildError(
                "Configuration file %s is corrupt. Aborting..." % self.path)
        saved_data, orig_ver = update.convert_to_current(self.path, saved_data)
        # Presumably this check comes after format-version updates because
        # at some point in paleontological history the file format did not
        # include "type".
        if saved_data.get("type", None) != 'autobuild':
            raise common.AutobuildError(
                self.path + ' not an autobuild configuration file')
        self.__init_from_dict(saved_data)
        logger.debug("Configuration file '%s'" % self.path)
        if orig_ver:
            logger.warn("Saving configuration file %s in format %s"
                        % (self.path, AUTOBUILD_CONFIG_VERSION))
            self.save()
            # We don't want orig_ver to appear in the saved file: that's
            # for internal use only. But we do want to track it because
            # there are those who care what kind of file we originally
            # read.
            self["orig_ver"] = orig_ver
    elif not os.path.exists(self.path):
        logger.warn("Configuration file '%s' not found" % self.path)
    else:
        raise ConfigurationError("cannot create configuration file %s" % self.path)
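# Hedged sketch of the "search upward" path resolution that __load() delegates
# to common.search_up_for_file(): starting from the directory containing the
# requested file, walk toward the filesystem root and return the first existing
# file with that name, or None. This is an illustrative stand-in, not the
# implementation in common.py.
def _search_up_for_file_sketch(abs_path):
    import os
    filename = os.path.basename(abs_path)
    directory = os.path.dirname(abs_path)
    while True:
        candidate = os.path.join(directory, filename)
        if os.path.isfile(candidate):
            return candidate
        parent = os.path.dirname(directory)
        if parent == directory:
            # reached the filesystem root without finding the file
            return None
        directory = parent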
def __extract_zip_archive(cachename, install_dir, exclude=[]):
    zip_archive = zipfile.ZipFile(cachename, 'r')
    extract = [member for member in zip_archive.namelist() if member not in exclude]
    conflicts = [member for member in extract
                 if os.path.exists(os.path.join(install_dir, member))
                 and not os.path.isdir(os.path.join(install_dir, member))]
    if conflicts:
        raise common.AutobuildError("conflicting files:\n " + '\n '.join(conflicts))
    zip_archive.extractall(path=install_dir, members=extract)
    return extract
def __load(self, path=None):
    if os.path.isabs(path):
        self.path = path
    else:
        abs_path = os.path.abspath(path)
        found_path = common.search_up_for_file(abs_path)
        if found_path is not None:
            self.path = found_path
        else:
            self.path = abs_path
    if os.path.isfile(self.path):
        installed_xml = file(self.path, 'rb').read()
        if not installed_xml:
            logger.warn("Installed file '%s' is empty" % self.path)
            return
        logger.debug("Installed file '%s'" % self.path)
        try:
            saved_data = llsd.parse(installed_xml)
        except llsd.LLSDParseError:
            raise common.AutobuildError(
                "Installed file %s is not valid. Aborting..." % self.path)
        if not (('version' in saved_data and saved_data['version'] == self.version)
                and ('type' in saved_data)
                and (saved_data['type'] == AUTOBUILD_INSTALLED_TYPE)):
            raise common.AutobuildError(
                self.path + ' is not compatible with this version of autobuild.'
                + '\nClearing your build directory and rebuilding should correct it.')
        dependencies = saved_data.pop('dependencies', {})
        for (name, package) in dependencies.iteritems():
            self.dependencies[name] = package
        self.update(saved_data)
    elif not os.path.exists(self.path):
        logger.info("Installed packages file '%s' not found; creating." % self.path)
    else:
        raise ConfigurationError("cannot create installed packages file %s" % self.path)
def __extract_tar_file(cachename, install_dir, exclude=[]):
    # Attempt to extract the package from the install cache
    tar = tarfile.open(cachename, 'r')
    extract = [member for member in tar.getmembers() if member.name not in exclude]
    conflicts = [member.name for member in extract
                 if os.path.exists(os.path.join(install_dir, member.name))
                 and not os.path.isdir(os.path.join(install_dir, member.name))]
    if conflicts:
        raise common.AutobuildError("conflicting files:\n " + '\n '.join(conflicts))
    tar.extractall(path=install_dir, members=extract)
    return [member.name for member in extract]
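# Hedged, self-contained sketch of the extract-then-record-manifest pattern the
# archive helpers above implement: build a tiny tarball in a temp directory,
# refuse members that would clobber existing non-directory files, and keep the
# list of extracted names (a later uninstall, as in clean_files() below,
# consumes such a list). All paths here are temporary and invented for
# illustration.
import os
import tarfile
import tempfile

demo_root = tempfile.mkdtemp()
with open(os.path.join(demo_root, "hello.txt"), "w") as f:
    f.write("hello\n")
demo_archive = os.path.join(demo_root, "demo.tar.gz")
with tarfile.open(demo_archive, "w:gz") as tar:
    tar.add(os.path.join(demo_root, "hello.txt"), arcname="hello.txt")

demo_install = os.path.join(demo_root, "install")
os.mkdir(demo_install)
with tarfile.open(demo_archive, "r") as tar:
    members = tar.getmembers()
    conflicts = [m.name for m in members
                 if os.path.exists(os.path.join(demo_install, m.name))
                 and not os.path.isdir(os.path.join(demo_install, m.name))]
    assert not conflicts  # nothing pre-existing in the fresh install dir
    tar.extractall(path=demo_install, members=members)
manifest = [m.name for m in members]  # ['hello.txt']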
def clean_files(install_dir, files):
    # Tarballs that name directories name them before the files they contain,
    # so the unpacker will create the directory before creating files in it.
    # For exactly that reason, we must remove things in reverse order.
    logger.debug("uninstalling from '%s'" % install_dir)
    directories = set()  # directories we've removed files from
    for filename in files:
        install_path = os.path.join(install_dir, filename)
        if not os.path.isdir(install_path):
            # deal with directories below, after all files
            try:
                os.remove(install_path)
                # We used to print "removing f" before the call above, the
                # assumption being that we'd either succeed or produce a
                # traceback. But there are a couple different ways we could get
                # through this logic without actually deleting. So produce a
                # message only when we're sure we've actually deleted something.
                logger.info(" removed " + filename)
            except OSError as err:
                if err.errno == errno.ENOENT:
                    # this file has already been deleted for some reason -- fine
                    logger.warning(" expected file not found: " + install_path)
                else:
                    raise common.AutobuildError(str(err))
        directories.add(os.path.dirname(filename))
    # Check to see if any of the directories from which we removed files are now
    # empty; if so, delete them (they will not have been listed in the manifest).
    # Do the checks in descending length order so that subdirectories will appear
    # before their containing directory. The loop is nested in order to clean up
    # directories that previously contained only subdirectories, so they were not
    # added to the list when deleting files above.
    while directories:
        parents = set()
        for dirname in sorted(directories, cmp=lambda x, y: cmp(len(y), len(x))):
            dir_path = os.path.join(install_dir, dirname)
            if os.path.exists(dir_path) and not os.listdir(dir_path):
                os.rmdir(dir_path)
                logger.info(" removed " + dirname)
                parent = os.path.dirname(dirname)
                if parent:
                    parents.add(parent)
        directories = parents
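# Hedged sketch of the bottom-up directory pruning clean_files() finishes with:
# remove directories that have become empty, deepest paths first, then retry
# their parents until nothing newly-empty remains. The nested temp directories
# here are invented for illustration.
import os
import tempfile

prune_root = tempfile.mkdtemp()
os.makedirs(os.path.join(prune_root, "a", "b", "c"))

candidates = set([os.path.join("a", "b", "c")])
while candidates:
    newly_emptied = set()
    for dirname in sorted(candidates, key=len, reverse=True):
        dir_path = os.path.join(prune_root, dirname)
        if os.path.isdir(dir_path) and not os.listdir(dir_path):
            os.rmdir(dir_path)
            parent = os.path.dirname(dirname)
            if parent:
                newly_emptied.add(parent)
    candidates = newly_emptied

assert os.listdir(prune_root) == []  # a/, a/b/ and a/b/c/ were all pruned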
def __init__(self, path=None, stream=None, parsed_llsd=None, convert_platform=None,
             create_quietly=False):
    self.version = AUTOBUILD_METADATA_VERSION
    self.type = AUTOBUILD_METADATA_TYPE
    self.build_id = None
    self.platform = None
    self.configuration = None
    self.package_description = None
    self.manifest = []
    self.dependencies = {}
    self.archive = None
    self.install_type = None
    self.install_dir = None
    self.dirty = False
    metadata_xml = None
    if path:
        self.path = path
        if os.path.isfile(self.path):
            metadata_xml = file(self.path, 'rb').read()
            if not metadata_xml:
                logger.warn("Metadata file '%s' is empty" % self.path)
                self.dirty = False
                return
        elif not os.path.exists(self.path):
            if not create_quietly:
                logger.warn("Configuration file '%s' not found" % self.path)
    elif stream:
        metadata_xml = stream.read()
    if metadata_xml:
        try:
            parsed_llsd = llsd.parse(metadata_xml)
        except llsd.LLSDParseError:
            raise common.AutobuildError(
                "Metadata file %s is corrupt. Aborting..." % self.path)
    if parsed_llsd:
        self.__load(parsed_llsd)
        self.update(parsed_llsd)
def set_recursive_loglevel(self, logger, level):
    """
    Sets the logger level, and also saves the equivalent option argument
    in the AUTOBUILD_LOGLEVEL environment variable so that any recursive
    invocation of autobuild uses the same level.
    """
    logger.setLevel(level)
    if level == logging.ERROR:
        os.environ[AUTOBUILD_LOGLEVEL] = '--quiet'
    elif level == logging.WARNING:
        os.environ[AUTOBUILD_LOGLEVEL] = ''
    elif level == logging.INFO:
        os.environ[AUTOBUILD_LOGLEVEL] = '--verbose'
    elif level == logging.DEBUG:
        os.environ[AUTOBUILD_LOGLEVEL] = '--debug'
    else:
        raise common.AutobuildError("invalid effective log level %s"
                                    % logging.getLevelName(level))
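# Hedged sketch of how the saved option propagates to a nested autobuild run:
# set_recursive_loglevel() stores a command-line flag in the environment, and a
# child invocation can prepend it to its own arguments. The literal variable
# name 'AUTOBUILD_LOGLEVEL' and the argv handling below are assumptions made
# for illustration.
import os

def _child_args_sketch(args):
    saved = os.environ.get('AUTOBUILD_LOGLEVEL', '')
    return ([saved] if saved else []) + args

# e.g. after set_recursive_loglevel(logger, logging.DEBUG) the environment
# holds '--debug', so _child_args_sketch(['install']) -> ['--debug', 'install']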
def __extract_tar_file(cachename, install_dir, exclude=[]):
    # Attempt to extract the package from the install cache
    try:
        from cStringIO import StringIO as BIO
    except ImportError:  # python 3
        from io import BytesIO as BIO
    filedata = None
    if ".tar.xz" in cachename:
        try:
            import lzma
        except ImportError:
            from backports import lzma
        with lzma.open(cachename, "r") as f:
            filedata = f.read()
    elif ".tar.bz2" in cachename:
        import bz2
        with bz2.BZ2File(cachename, "r") as f:
            filedata = f.read()
    elif ".tar.gz" in cachename:
        import gzip
        with gzip.open(cachename, "r") as f:
            filedata = f.read()
    else:
        # unrecognized compression suffix: fail with a clear message rather than
        # passing filedata=None to BIO() below
        raise common.AutobuildError("unrecognized archive type for %s" % cachename)
    file_in = BIO(filedata)
    tar = tarfile.open(fileobj=file_in, mode='r')
    extract = [member for member in tar.getmembers() if member.name not in exclude]
    conflicts = [member.name for member in extract
                 if os.path.exists(os.path.join(install_dir, member.name))
                 and not os.path.isdir(os.path.join(install_dir, member.name))]
    if conflicts:
        raise common.AutobuildError("conflicting files:\n " + '\n '.join(conflicts))
    tar.extractall(path=install_dir, members=extract)
    return [member.name for member in extract]