Example #1
	def open(self):
		# Initialize database.
		self.db.initialize()

		# Create a progressbar.
		pb = util.make_progress(_("Loading installed packages"), len(self.db))

		# Remove all data from the current index.
		self.index.clear()

		i = 0
		for pkg in self.db.packages:
			if pb:
				i += 1
				pb.update(i)

			self.index.add_package(pkg)

		self.index.optimize()

		if pb:
			pb.finish()

		# Mark repo as open.
		self.opened = True
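
All examples in this section share the same progress-bar contract: util.make_progress() either returns an object with update() and finish() methods, or None when no progress bar can be drawn, which is why every call site is guarded with "if pb:". Below is a minimal sketch of that contract with a plain terminal renderer; all names here are hypothetical and the real pakfire helper is more elaborate.

import sys

class SimpleProgressBar(object):
	# Hypothetical stand-in for the object util.make_progress() returns.
	def __init__(self, message, maximum):
		self.message = message
		self.maximum = maximum

	def update(self, value):
		# Redraw a single "message [value/maximum]" status line in place.
		sys.stderr.write("\r%s [%d/%d]" % (self.message, value, self.maximum))
		sys.stderr.flush()

	def finish(self):
		# Terminate the status line.
		sys.stderr.write("\n")

def make_progress(message, maximum, eta=False):
	# Return None when stderr is not a terminal - this is why every
	# caller must check "if pb:" before calling update() or finish().
	if not sys.stderr.isatty():
		return None

	return SimpleProgressBar(message, maximum)
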
Example #2
	def add_packages(self, files):
		# Search for possible package files in the paths.
		files = self.search_files(*files)

		# Give up if there are no files to process.
		if not files:
			return

		# Create progress bar.
		pb = util.make_progress(_("%s: Adding packages...") % self.name, len(files))
		i = 0

		for file in files:
			if pb:
				i += 1
				pb.update(i)

			# Add the package to the repository.
			self.add_package(file, optimize_index=False)

		# Optimize the index once, after all packages have been added.
		self.optimize_index()

		if pb:
			pb.finish()
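
The optimize_index=False argument passed to add_package() defers index optimization until the whole batch has been added, so the expensive step runs once per batch instead of once per file. A toy illustration of that pattern with a stand-in index class (all names here are hypothetical):

class ToyIndex(object):
	def __init__(self):
		self.packages = []
		self.optimize_runs = 0

	def add_package(self, pkg):
		self.packages.append(pkg)

	def optimize(self):
		self.optimize_runs += 1

index = ToyIndex()
for pkg in ("foo", "bar", "baz"):
	# Deferring optimization here mirrors optimize_index=False above.
	index.add_package(pkg)

# One optimization run for the whole batch instead of three.
index.optimize()
assert index.optimize_runs == 1
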
Example #3
	def extract(self, message=None, prefix=None):
		# XXX: this needs to be made much better.

		files = self.files

		# Load progressbar.
		pb = None
		if message:
			message = "%-10s : %s" % (message, self.friendly_name)
			pb = util.make_progress(message, len(files), eta=False)

		dir_len = len(os.path.dirname(self.filename))

		# Copy all files that belong to the package
		i = 0
		for f in files:
			if pb:
				i += 1
				pb.update(i)

			_f = f[dir_len:]
			log.debug("%s/%s" % (prefix, _f))

			path = "%s/%s" % (prefix, _f)

			path_dir = os.path.dirname(path)
			if not os.path.exists(path_dir):
				os.makedirs(path_dir)

			shutil.copy2(f, path)

		if pb:
			pb.finish()

		# Download source files.
		for _filename in self.download():
			filename = "%s/files/%s" % (prefix, os.path.basename(_filename))
			dirname = os.path.dirname(filename)

			if not os.path.exists(dirname):
				os.makedirs(dirname)

			shutil.copy2(_filename, filename)
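
The path arithmetic above strips the directory of self.filename from each file and re-roots it under prefix. A worked example with made-up values (note the harmless double slash, which os.path and shutil tolerate):

import os

filename = "/var/tmp/pkg/package.nm"          # hypothetical self.filename
f = "/var/tmp/pkg/files/patches/fix.patch"    # hypothetical package file

dir_len = len(os.path.dirname(filename))      # len("/var/tmp/pkg") == 12
_f = f[dir_len:]                              # "/files/patches/fix.patch"
path = "%s/%s" % ("/build", _f)               # "/build//files/patches/fix.patch"
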
Example #4
	def create_datafile(self):
		# Create a list of all files that have to be put into the
		# package.
		files = []

		# Download all files that go into the package.
		for file in self.pkg.download():
			assert os.path.getsize(file), "Don't package empty files"
			files.append(("files/%s" % os.path.basename(file), file))

		# Add all files in the package directory.
		for file in self.pkg.files:
			files.append((os.path.relpath(file, self.pkg.path), file))

		# Add files in alphabetical order.
		files.sort()

		# Load progressbar.
		message = "%-10s : %s" % (_("Packaging"), self.pkg.friendly_name)
		pb = util.make_progress(message, len(files), eta=False)

		filename = self.mktemp()
		if self.payload_compression == "xz":
			datafile = tar.InnerTarFileXz.open(filename, mode="w")
		else:
			datafile = tar.InnerTarFile.open(filename, mode="w")

		i = 0
		for arcname, file in files:
			if pb:
				i += 1
				pb.update(i)

			datafile.add(file, arcname)
		datafile.close()

		if pb:
			pb.finish()

		return filename
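
create_datafile() collects (arcname, file) tuples so that downloaded sources land under files/ inside the archive while package files keep their path relative to the package directory. A worked example with made-up paths:

import os

pkg_path = "/usr/src/packages/foo"                    # hypothetical self.pkg.path
downloads = ["/var/cache/downloads/foo-1.0.tar.gz"]   # hypothetical download() result
pkg_files = [
	"/usr/src/packages/foo/foo.nm",
	"/usr/src/packages/foo/patches/fix.patch",
]

files = []
for file in downloads:
	files.append(("files/%s" % os.path.basename(file), file))
for file in pkg_files:
	files.append((os.path.relpath(file, pkg_path), file))

files.sort()
# Sorted arcnames: "files/foo-1.0.tar.gz", "foo.nm", "patches/fix.patch"
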
Example #5
	def open(self):
		# Find all files in the repository dir.
		files = self.search_files(self.path)

		# Create progress bar.
		pb = util.make_progress(_("%s: Reading packages...") % self.name, len(files))
		i = 0

		# Add all files to the index.
		for file in files:
			if pb:
				i += 1
				pb.update(i)

			pkg = packages.open(self.pakfire, self, file)
			self.index.add_package(pkg)

		if pb:
			pb.finish()

		# Mark repo as open.
		self.opened = True
Example #6
	def create_datafile(self):
		includes = []
		excludes = []

		# List of all patterns, which grows.
		patterns = self.pkg.files

		# Build the list of orphan directory candidates inside the
		# buildroot, skipping any that are symlinks.
		orphan_directories = []
		for d in ORPHAN_DIRECTORIES:
			if d.startswith("usr/"):
				b = os.path.basename(d)
				b = os.path.join(self.buildroot, b)

				if os.path.islink(b):
					continue

			d = os.path.join(self.buildroot, d)
			if not os.path.islink(d):
				orphan_directories.append(d)

		for pattern in patterns:
			# Check if we are running in include or exclude mode.
			if pattern.startswith("!"):
				files = excludes

				# Strip the ! character.
				pattern = pattern[1:]
			else:
				files = includes

			# Expand file to point to chroot.
			if pattern.startswith("/"):
				pattern = pattern[1:]
			pattern = os.path.join(self.buildroot, pattern)

			# Recognize the type of the pattern. Patterns could be a glob
			# pattern that is expanded here or just a directory which will
			# be included recursively.
			if "*" in pattern or "?" in pattern or ("[" in pattern and "]" in pattern):
				_patterns = glob.glob(pattern)
			else:
				_patterns = [pattern,]

			for pattern in _patterns:
				# Try to stat the pattern. If that is not successful, we cannot go on.
				try:
					os.lstat(pattern)
				except OSError:
					continue

				# Add directories recursively but skip those symlinks
				# that point to a directory.
				if os.path.isdir(pattern) and not os.path.islink(pattern):
					# Add directory itself.
					files.append(pattern)

					for dir, subdirs, _files in os.walk(pattern):
						for subdir in subdirs:
							subdir = os.path.join(dir, subdir)

							# Compare the full path against the list of
							# orphan directories.
							if subdir in orphan_directories:
								continue

							files.append(subdir)

						for file in _files:
							file = os.path.join(dir, file)
							files.append(file)

				# All other files are just added.
				else:
					files.append(pattern)

		files = []
		for file in includes:
			# Skip if file is already in the file set or
			# marked to be excluded from this archive.
			if file in excludes or file in files:
				continue

			# Skip orphan directories.
			if file in orphan_directories and not os.listdir(file):
				log.debug("Found an orphaned directory: %s" % file)
				continue

			files.append(file)

			while True:
				file = os.path.dirname(file)

				if file == self.buildroot:
					break

				if file not in files:
					files.append(file)

		files.sort()

		# Load progressbar.
		message = "%-10s : %s" % (_("Packaging"), self.pkg.friendly_name)
		pb = util.make_progress(message, len(files), eta=False)

		datafile = self.mktemp()
		if self.payload_compression == "xz":
			t = tar.InnerTarFileXz.open(datafile, mode="w")
		else:
			t = tar.InnerTarFile.open(datafile, mode="w")

		# All files in the tarball are relative to this directory.
		basedir = self.buildroot

		i = 0
		for file in files:
			if pb:
				i += 1
				pb.update(i)

			# Never package /.
			if os.path.normpath(file) == os.path.normpath(basedir):
				continue

			# Name of the file in the archive.
			arcname = "/%s" % os.path.relpath(file, basedir)

			# Add file to tarball.
			t.add(file, arcname=arcname, recursive=False)

		# Remove all packaged files.
		for file in reversed(files):
			# It's okay if we cannot remove directories,
			# when they are not empty.
			if os.path.isdir(file):
				try:
					os.rmdir(file)
				except OSError:
					continue
			else:
				try:
					os.unlink(file)
				except OSError:
					pass

			while True:
				file = os.path.dirname(file)

				if not file.startswith(basedir):
					break

				try:
					os.rmdir(file)
				except OSError:
					break

		# Close the tarfile.
		t.close()

		# Finish progressbar.
		if pb:
			pb.finish()

		return datafile
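
The leading "!" is the only thing that decides whether a pattern contributes to includes or excludes; everything else (globbing, recursive directory walking) applies to both lists. A small sketch of just that dispatch, with illustrative patterns:

patterns = [
	"/usr/bin/foo",       # plain file, included
	"/usr/lib/foo/*",     # glob, expanded later with glob.glob()
	"!/usr/share/doc",    # "!" prefix marks an exclude
]

includes = []
excludes = []
for pattern in patterns:
	if pattern.startswith("!"):
		# Strip the ! character and route into the exclude list.
		excludes.append(pattern[1:])
	else:
		includes.append(pattern)

assert includes == ["/usr/bin/foo", "/usr/lib/foo/*"]
assert excludes == ["/usr/share/doc"]
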
Example #7
	def extract(self, message=None, prefix=None):
		log.debug("Extracting package %s" % self.friendly_name)

		if prefix is None:
			prefix = ""

		# Open package data for read.
		payload_archive = self.open_payload_archive()

		# Load progressbar.
		pb = None
		if message:
			message = "%-10s : %s" % (message, self.friendly_name)
			pb = util.make_progress(message, len(self.filelist), eta=False)

		# Collect error and warning messages that are passed on to the user.
		messages = []

		name2file = {}
		for file in self.filelist:
			if file.is_dir() and file.name.endswith("/"):
				name = file.name[:-1]
			else:
				name = file.name

			name2file[name] = file

		i = 0
		while True:
			member = payload_archive.next()
			if not member:
				break

			# Check if file is also known in metadata.
			name = member.name
			if not name.startswith("/"):
				name = "/%s" % name

			try:
				file = name2file[name]
			except KeyError:
				log.warning(_("File in archive is missing in file metadata: %s. Skipping.") % name)
				continue

			# Update progress.
			if pb:
				i += 1
				pb.update(i)

			target = os.path.join(prefix, member.name)

			# Check if a configuration file is already present. We don't want to
			# overwrite that.
			if file.is_config():
				config_save = "%s%s" % (target, CONFIG_FILE_SUFFIX_SAVE)
				config_new  = "%s%s" % (target, CONFIG_FILE_SUFFIX_NEW)

				if os.path.exists(config_save) and not os.path.exists(target):
					# Extract the new configuration file with the
					# CONFIG_FILE_SUFFIX_NEW suffix and restore the saved copy.
					payload_archive.extract(member, path=prefix)

					shutil.move(target, config_new)
					shutil.move(config_save, target)
					continue

				elif os.path.exists(target):
					# If the files are identical, we skip the extraction of a
					# new configuration file. We also do that when the new configuration file
					# is a dummy file.
					if file.size == 0:
						continue

					# Calc hash of the current configuration file.
					config_hash1 = hashlib.new("sha512")
					f = open(target)
					while True:
						buf = f.read(BUFFER_SIZE)
						if not buf:
							break
						config_hash1.update(buf)
					f.close()

					if file.hash1 == config_hash1.hexdigest():
						continue

					# Backup old configuration file and extract new one.
					shutil.move(target, config_save)
					payload_archive.extract(member, path=prefix)

					# Save new configuration file as CONFIG_FILE_SUFFIX_NEW and
					# restore old configuration file.
					shutil.move(target, config_new)
					shutil.move(config_save, target)

					if prefix:
						config_new = os.path.relpath(config_new, prefix)
					messages.append(_("Config file created as %s") % config_new)
					continue

			# Don't overwrite target files if they already exist.
			if file.is_datafile() and os.path.exists(target):
				log.debug(_("Don't overwrite already existing datafile '/%s'") % member.name)
				continue

			# If the member is a directory and if it already exists, we
			# don't need to create it again.
			if os.path.exists(target):
				if member.isdir():
					continue

				else:
					# Remove the file if it already exists.
					try:
						os.unlink(target)
					except OSError:
						messages.append(_("Could not remove file: /%s") % member.name)

			#if self.pakfire.config.get("debug"):
			#	msg = "Creating file (%s:%03d:%03d) " % \
			#		(tarfile.filemode(member.mode), member.uid, member.gid)
			#	if member.issym():
			#		msg += "/%s -> %s" % (member.name, member.linkname)
			#	elif member.islnk():
			#		msg += "/%s link to /%s" % (member.name, member.linkname)
			#	else:
			#		msg += "/%s" % member.name
			#	log.debug(msg)

			payload_archive.extract(member, path=prefix)

		# Close all open files.
		payload_archive.close()

		if pb:
			pb.finish()

		# Print messages.
		for msg in messages:
			log.warning(msg)
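
The configuration-file handling above hinges on comparing the stored file.hash1 against a SHA-512 digest of the file currently on disk. A self-contained sketch of that comparison; BUFFER_SIZE is an assumed value, as the real constant is defined elsewhere in pakfire:

import hashlib

BUFFER_SIZE = 102400   # assumed value; the real constant lives elsewhere

def calc_sha512(path):
	# Stream the file through SHA-512, as the extraction code does above.
	h = hashlib.new("sha512")
	f = open(path, "rb")
	try:
		while True:
			buf = f.read(BUFFER_SIZE)
			if not buf:
				break
			h.update(buf)
	finally:
		f.close()
	return h.hexdigest()

# unchanged = calc_sha512(target) == file.hash1
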
Example #8
	def _remove_files(self, files, message, prefix):
		if prefix in ("/", None):
			prefix = ""

		# Load progressbar.
		pb = None
		if message:
			message = "%-10s : %s" % (message, self.friendly_name)
			pb = util.make_progress(message, len(files), eta=False)

		# Sort files by the length of their name, so that all files in
		# a directory are removed first and we can then check whether
		# there are any files left.
		files.sort(key=lambda f: len(f.name), reverse=True)

		# Messages to the user.
		messages = []

		i = 0
		for _file in files:
			# Update progress.
			if pb:
				i += 1
				pb.update(i)

			log.debug("Removing file: %s" % _file)

			if prefix:
				file = os.path.join(prefix, _file.name[1:])
				assert file.startswith("%s/" % prefix)
			else:
				file = _file.name

			# Rename configuration files.
			if _file.is_config():
				# Skip already removed config files.
				try:
					os.lstat(file)
				except OSError:
					continue

				file_save = "%s%s" % (file, CONFIG_FILE_SUFFIX_SAVE)

				try:
					shutil.move(file, file_save)
				except shutil.Error as e:
					log.error(e)

				if prefix:
					file_save = os.path.relpath(file_save, prefix)
				messages.append(_("Config file saved as %s.") % file_save)
				continue

			# Preserve datafiles.
			if _file.is_datafile():
				log.debug(_("Preserving datafile '/%s'") % _file)
				continue

			# Handle regular files and symlinks.
			if os.path.isfile(file) or os.path.islink(file):
				log.debug("Removing %s..." % _file)
				try:
					os.remove(file)
				except OSError:
					log.error("Cannot remove file: %s. Remove manually." % _file)

			# Handle directories.
			# Skip removal if the directory is a mountpoint.
			elif os.path.isdir(file) and not os.path.ismount(file):
				# Try to remove the directory. If it is not empty, OSError is raised,
				# but we are okay with that.
				try:
					os.rmdir(file)
				except OSError:
					pass

			# Handle files that have already been removed
			# by somebody else.
			elif not os.path.exists(file):
				pass

			# Log all unhandled types.
			else:
				log.warning("Cannot remove file: %s. Filetype is unhandled." % file)
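
Sorting by name length in reverse guarantees that every file sorts before the directory containing it, since a child path is always strictly longer than its parent. A worked example with made-up paths:

names = ["/etc/foo", "/etc/foo/conf.d", "/etc/foo/conf.d/main.conf"]
names.sort(key=len, reverse=True)

# Longest (deepest) entries come first, so files are gone before
# os.rmdir() is attempted on their parent directories.
assert names == [
	"/etc/foo/conf.d/main.conf",
	"/etc/foo/conf.d",
	"/etc/foo",
]
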
Example #9
	def save(self, path=None, algo="xz"):
		"""
			This function saves the database and metadata to path so it can
			be exported to a remote repository.
		"""
		if not path:
			path = self.path

		# Create filenames
		metapath = os.path.join(path, METADATA_DOWNLOAD_PATH)
		db_path = os.path.join(metapath, METADATA_DATABASE_FILE)
		md_path = os.path.join(metapath, METADATA_DOWNLOAD_FILE)

		# Remove all pre-existing metadata.
		if os.path.exists(metapath):
			util.rm(metapath)

		# Create directory for metadata.
		os.makedirs(metapath)

		# Save the database to path and get the filename.
		self.index.write(db_path)

		# Hash the database file so that it gets a unique name and we
		# won't get into any trouble with caching proxies.
		db_hash = util.calc_hash1(db_path)

		db_path2 = os.path.join(os.path.dirname(db_path),
			"%s-%s" % (db_hash, os.path.basename(db_path)))

		# Compress the database.
		if algo:
			# Open the input file and get its size.
			f = open(db_path, "rb")
			filesize = os.path.getsize(db_path)

			# Make a nice progress bar.
			p = util.make_progress(_("Compressing database..."), filesize)

			# Create compressing file handler.
			c = compress.compressobj(db_path2)

			try:
				size = 0
				while True:
					buf = f.read(BUFFER_SIZE)
					if not buf:
						break

					if p:
						size += len(buf)
						p.update(size)

					c.write(buf)
			except:
				# XXX catch compression errors
				raise

			finally:
				f.close()
				c.close()
				if p:
					p.finish()

				# Remove old database.
				os.unlink(db_path)

		else:
			shutil.move(db_path, db_path2)

		# Create a new metadata object and add our information to it.
		md = metadata.Metadata(self.pakfire)

		# Save name of the hashed database to the metadata.
		md.database = os.path.basename(db_path2)
		md.database_hash1 = db_hash
		md.database_compression = algo

		# Save metadata to the repository.
		md.save(md_path)
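
Prefixing the database name with its own hash means every change to the database produces a new file name, so stale copies on caching proxies can never be served by accident. A worked example of the rename; the path is hypothetical and the hash value is made up:

import os

db_path = "/srv/repo/repodata/packages.db"    # hypothetical path
db_hash = "3a7bd3e2360a3d29eea436fcfb7e44c7"  # made-up calc_hash1() output

db_path2 = os.path.join(os.path.dirname(db_path),
	"%s-%s" % (db_hash, os.path.basename(db_path)))

# "/srv/repo/repodata/3a7bd3e2360a3d29eea436fcfb7e44c7-packages.db"
print(db_path2)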