Example #1
    def uuid(self):
        if self.filename:
            hash1 = util.calc_hash1(self.filename)

            # Format the 40-character SHA1 hex digest as a version 5 UUID
            # (8-4-4-4-12 groups), forcing the version nibble to "5".
            return "%8s-%4s-5%3s-%4s-%12s" % \
             (hash1[0:8], hash1[8:12], hash1[13:16], hash1[16:20], hash1[20:32])

        return ""  # XXX What to do here?
Example #2
	def hash1(self):
		"""
			Calculate the hash1 of this package.
		"""
		return util.calc_hash1(self.filename)
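
The digest returned here is the same one that Example #1 slices into a UUID.
A minimal sketch of that relationship, assuming a 40-character SHA1 hex digest
(the hash1_to_uuid helper is hypothetical):

    def hash1_to_uuid(hash1):
        # Same formatting as Example #1: 8-4-4-4-12 groups, with the version
        # nibble forced to "5".
        return "%8s-%4s-5%3s-%4s-%12s" % \
            (hash1[0:8], hash1[8:12], hash1[13:16], hash1[16:20], hash1[20:32])
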
Example #3
    def save(self, path=None, algo="xz"):
        """
			This function saves the database and metadata to path so it can
			be exported to a remote repository.
		"""
        if not path:
            path = self.path

        # Create filenames
        metapath = os.path.join(path, METADATA_DOWNLOAD_PATH)
        db_path = os.path.join(metapath, METADATA_DATABASE_FILE)
        md_path = os.path.join(metapath, METADATA_DOWNLOAD_FILE)

        # Remove all pre-existing metadata.
        if os.path.exists(metapath):
            util.rm(metapath)

        # Create the directory for the metadata.
        os.makedirs(metapath)

        # Write the database to its destination path.
        self.index.write(db_path)

        # Give the database file a unique, hash-based name so we won't get
        # into any trouble with caching proxies.
        db_hash = util.calc_hash1(db_path)

        db_path2 = os.path.join(os.path.dirname(db_path),
                                "%s-%s" % (db_hash, os.path.basename(db_path)))

        # Compress the database.
        if algo:
            # Open the input file in binary mode and get its size.
            f = open(db_path, "rb")
            filesize = os.path.getsize(db_path)

            # Make a nice progress bar.
            p = util.make_progress(_("Compressing database..."), filesize)

            # Create compressing file handler.
            c = compress.compressobj(db_path2)

            try:
                size = 0
                while True:
                    buf = f.read(BUFFER_SIZE)
                    if not buf:
                        break

                    if p:
                        size += len(buf)
                        p.update(size)

                    c.write(buf)
            except:
                # XXX catch compression errors
                raise

            finally:
                f.close()
                c.close()
                if p:
                    p.finish()

                # Remove old database.
                os.unlink(db_path)

        else:
            shutil.move(db_path, db_path2)

        # Create a new metadata object and add our information to it.
        md = metadata.Metadata(self.pakfire)

        # Save name of the hashed database to the metadata.
        md.database = os.path.basename(db_path2)
        md.database_hash1 = db_hash
        md.database_compression = algo

        # Save the metadata to the repository.
        md.save(md_path)
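
The hash-prefixed rename above is a cache-busting pattern: because the file
name changes whenever the contents change, a stale copy on a caching proxy can
never be mistaken for the current database. A minimal sketch of the idea (the
unique_name helper is hypothetical):

    import os
    import hashlib

    def unique_name(path):
        # Prefix the file name with a digest of its contents, so any change
        # to the contents also changes the name that clients download.
        with open(path, "rb") as f:
            digest = hashlib.sha1(f.read()).hexdigest()
        return os.path.join(os.path.dirname(path),
                            "%s-%s" % (digest, os.path.basename(path)))
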
Example #4
	def hash1(self, filename):
		"""
			Return hash of the file in the cache.
		"""
		return util.calc_hash1(self.abspath(filename))
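
A hedged usage sketch: a cache method like this is typically used to check a
downloaded file against the digest published in the repository metadata. The
verify method below is hypothetical, not part of the code above:

	def verify(self, filename, expected_hash1):
		"""
			Return True if the cached file matches the expected hash.
		"""
		return self.hash1(filename) == expected_hash1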