Example #1
	def write(self, sign=False, force=False):
		""" Write Manifest instance to disk, optionally signing it. Returns
		True if the Manifest is actually written, and False if the write
		is skipped due to existing Manifest being identical."""
		rval = False
		if not self.allow_create:
			return rval
		self.checkIntegrity()
		try:
			myentries = list(self._createManifestEntries())
			update_manifest = True
			existing_st = None
			if myentries and not force:
				try:
					f = io.open(_unicode_encode(self.getFullname(),
						encoding=_encodings['fs'], errors='strict'),
						mode='r', encoding=_encodings['repo.content'],
						errors='replace')
					oldentries = list(self._parseManifestLines(f))
					existing_st = os.fstat(f.fileno())
					f.close()
					if len(oldentries) == len(myentries):
						update_manifest = False
						for i in range(len(oldentries)):
							if oldentries[i] != myentries[i]:
								update_manifest = True
								break
				except (IOError, OSError) as e:
					if e.errno == errno.ENOENT:
						pass
					else:
						raise

			if update_manifest:
				if myentries or not (self.thin or self.allow_missing):
					# If myentries is empty, don't write an empty manifest
					# when thin or allow_missing is enabled. Except for
					# thin manifests with no DIST entries, myentries is
					# non-empty for all currently known use cases.
					write_atomic(self.getFullname(), "".join("%s\n" %
						_unicode(myentry) for myentry in myentries))
					self._apply_max_mtime(existing_st, myentries)
					rval = True
				else:
					# With thin manifest, there's no need to have
					# a Manifest file if there are no DIST entries.
					try:
						os.unlink(self.getFullname())
					except OSError as e:
						if e.errno != errno.ENOENT:
							raise
					rval = True

			if sign:
				self.sign()
		except (IOError, OSError) as e:
			if e.errno == errno.EACCES:
				raise PermissionDenied(str(e))
			raise
		return rval
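
The method above regenerates the Manifest entries, compares them with what is already on disk, and rewrites the file only when something changed. A minimal standalone sketch of that compare-then-replace skeleton, assuming nothing Portage-specific (write_if_changed and the plain open/rename calls are illustrative; the real method additionally preserves the old mtime via _apply_max_mtime() and signs the result on request):

import errno
import os
import tempfile


def write_if_changed(path, new_content):
    """Write new_content to path only if it differs from the current
    contents. Returns True when the file was (re)written."""
    try:
        with open(path, "r", encoding="utf-8") as f:
            if f.read() == new_content:
                return False  # identical content, skip the write
    except OSError as e:
        # A missing file simply means we have to write it.
        if e.errno != errno.ENOENT:
            raise

    # Write a sibling temporary file first, then rename it into place so
    # readers never observe a half-written file.
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
    try:
        with os.fdopen(fd, "w", encoding="utf-8") as f:
            f.write(new_content)
        os.rename(tmp_path, path)
    except BaseException:
        os.unlink(tmp_path)
        raise
    return True
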
Example #2
    def _start(self):

        if self.background:
            # Automatically prevent color codes from showing up in logs,
            # since we're not displaying to a terminal anyway.
            self.settings['NOCOLOR'] = 'true'

        if self._enable_ipc_daemon:
            self.settings.pop('PORTAGE_EBUILD_EXIT_FILE', None)
            if self.phase not in self._phases_without_builddir:
                self.settings['PORTAGE_IPC_DAEMON'] = "1"
                self._start_ipc_daemon()
            else:
                self.settings.pop('PORTAGE_IPC_DAEMON', None)
        else:
            # Since the IPC daemon is disabled, use a simple tempfile based
            # approach to detect unexpected exit like in bug #190128.
            self.settings.pop('PORTAGE_IPC_DAEMON', None)
            if self.phase not in self._phases_without_builddir:
                exit_file = os.path.join(self.settings['PORTAGE_BUILDDIR'],
                                         '.exit_status')
                self.settings['PORTAGE_EBUILD_EXIT_FILE'] = exit_file
                try:
                    os.unlink(exit_file)
                except OSError:
                    if os.path.exists(exit_file):
                        # make sure it doesn't exist
                        raise
            else:
                self.settings.pop('PORTAGE_EBUILD_EXIT_FILE', None)

        SpawnProcess._start(self)
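
The exit-file handling above removes a stale status file and re-raises only when removal failed and the file is still present. The same idiom, isolated as a small helper (remove_stale_marker is a hypothetical name, not a Portage function):

import os


def remove_stale_marker(path):
    """Remove a leftover marker file, tolerating 'already gone' but
    re-raising when the unlink failed and the file still exists."""
    try:
        os.unlink(path)
    except OSError:
        if os.path.exists(path):
            # Removal failed for some reason other than ENOENT.
            raise
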
Example #3
	def _start(self):
		"""
		Note: Unlike a normal AsynchronousTask.start() method,
		this one does all of its work synchronously. The returncode
		attribute will be set before it returns.
		"""

		pkg = self.pkg
		root_config = pkg.root_config
		bintree = root_config.trees["bintree"]
		rval = os.EX_OK
		stdout_orig = sys.stdout
		stderr_orig = sys.stderr
		out = portage.StringIO()
		try:
			sys.stdout = out
			sys.stderr = out
			try:
				bintree.digestCheck(pkg)
			except portage.exception.FileNotFound:
				writemsg("!!! Fetching Binary failed " + \
					"for '%s'\n" % pkg.cpv, noiselevel=-1)
				rval = 1
			except portage.exception.DigestException as e:
				writemsg("\n!!! Digest verification failed:\n",
					noiselevel=-1)
				writemsg("!!! %s\n" % e.value[0],
					noiselevel=-1)
				writemsg("!!! Reason: %s\n" % e.value[1],
					noiselevel=-1)
				writemsg("!!! Got: %s\n" % e.value[2],
					noiselevel=-1)
				writemsg("!!! Expected: %s\n" % e.value[3],
					noiselevel=-1)
				rval = 1
			if rval == os.EX_OK:
				# If this was successful, discard the log here since otherwise
				# we'll get multiple logs for the same package.
				if self.logfile is not None:
					try:
						os.unlink(self.logfile)
					except OSError:
						pass
			else:
				pkg_path = bintree.getname(pkg.cpv)
				head, tail = os.path.split(pkg_path)
				temp_filename = _checksum_failure_temp_file(head, tail)
				writemsg("File renamed to '%s'\n" % (temp_filename,),
					noiselevel=-1)
		finally:
			sys.stdout = stdout_orig
			sys.stderr = stderr_orig

		msg = _unicode_decode(out.getvalue(),
			encoding=_encodings['content'], errors='replace')
		if msg:
			self.scheduler.output(msg, log_path=self.logfile)

		self.returncode = rval
		self.wait()
Example #4
	def _testPipeLogger(self, test_string):

		producer = PopenProcess(proc=subprocess.Popen(
			["bash", "-c", self._echo_cmd % test_string],
			stdout=subprocess.PIPE, stderr=subprocess.STDOUT),
			scheduler=global_event_loop())

		fd, log_file_path = tempfile.mkstemp()
		try:

			consumer = PipeLogger(background=True,
				input_fd=os.dup(producer.proc.stdout.fileno()),
				log_file_path=log_file_path)

			# Close the stdout pipe, since we duplicated it, and it
			# must be closed in order to avoid a ResourceWarning.
			producer.proc.stdout.close()
			producer.pipe_reader = consumer

			producer.start()
			producer.wait()

			self.assertEqual(producer.returncode, os.EX_OK)
			self.assertEqual(consumer.returncode, os.EX_OK)

			with open(log_file_path, 'rb') as f:
				content = f.read()

		finally:
			os.close(fd)
			os.unlink(log_file_path)

		return content.decode('ascii', 'replace')
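
Stripped of the PopenProcess/PipeLogger machinery, the test follows the usual mkstemp-plus-finally cleanup pattern; a minimal sketch under that assumption (with_temp_log is an illustrative name):

import os
import tempfile


def with_temp_log(write_log):
    """Run write_log(path) against a fresh temporary file, return the
    resulting bytes, and always close and remove the file."""
    fd, path = tempfile.mkstemp()
    try:
        write_log(path)
        with open(path, "rb") as f:
            return f.read()
    finally:
        os.close(fd)  # mkstemp hands back an already-open descriptor
        os.unlink(path)


def _write_hello(path):
    with open(path, "w") as f:
        f.write("hello\n")


print(with_temp_log(_write_hello))  # b'hello\n'
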
Example #5
	def testLogfile(self):
		logfile = None
		try:
			fd, logfile = tempfile.mkstemp()
			os.close(fd)
			null_fd = os.open('/dev/null', os.O_RDWR)
			test_string = 2 * "blah blah blah\n"
			scheduler = PollScheduler().sched_iface
			proc = SpawnProcess(
				args=[BASH_BINARY, "-c",
				"echo -n '%s'" % test_string],
				env={}, fd_pipes={0:sys.stdin.fileno(), 1:null_fd, 2:null_fd},
				scheduler=scheduler,
				logfile=logfile)
			proc.start()
			os.close(null_fd)
			self.assertEqual(proc.wait(), os.EX_OK)
			f = io.open(_unicode_encode(logfile,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['content'], errors='strict')
			log_content = f.read()
			f.close()
			# When logging passes through a pty, this comparison will fail
			# unless the oflag terminal attributes have the termios.OPOST
			# bit disabled. Otherwise, transformations such as \n -> \r\n
			# may occur.
			self.assertEqual(test_string, log_content)
		finally:
			if logfile:
				try:
					os.unlink(logfile)
				except EnvironmentError as e:
					if e.errno != errno.ENOENT:
						raise
					del e
Example #6
	def _start(self):

		if self.background:
			# Automatically prevent color codes from showing up in logs,
			# since we're not displaying to a terminal anyway.
			self.settings['NOCOLOR'] = 'true'

		if self._enable_ipc_daemon:
			self.settings.pop('PORTAGE_EBUILD_EXIT_FILE', None)
			if self.phase not in self._phases_without_builddir:
				self.settings['PORTAGE_IPC_DAEMON'] = "1"
				self._start_ipc_daemon()
			else:
				self.settings.pop('PORTAGE_IPC_DAEMON', None)
		else:
			# Since the IPC daemon is disabled, use a simple tempfile based
			# approach to detect unexpected exit like in bug #190128.
			self.settings.pop('PORTAGE_IPC_DAEMON', None)
			if self.phase not in self._phases_without_builddir:
				exit_file = os.path.join(
					self.settings['PORTAGE_BUILDDIR'],
					'.exit_status')
				self.settings['PORTAGE_EBUILD_EXIT_FILE'] = exit_file
				try:
					os.unlink(exit_file)
				except OSError:
					if os.path.exists(exit_file):
						# make sure it doesn't exist
						raise
			else:
				self.settings.pop('PORTAGE_EBUILD_EXIT_FILE', None)

		SpawnProcess._start(self)
Example #7
    def _init_ipc_fifos(self):

        input_fifo = os.path.join(self.settings["PORTAGE_BUILDDIR"], ".ipc_in")
        output_fifo = os.path.join(self.settings["PORTAGE_BUILDDIR"],
                                   ".ipc_out")

        for p in (input_fifo, output_fifo):

            st = None
            try:
                st = os.lstat(p)
            except OSError:
                os.mkfifo(p)
            else:
                if not stat.S_ISFIFO(st.st_mode):
                    st = None
                    try:
                        os.unlink(p)
                    except OSError:
                        pass
                    os.mkfifo(p)

            apply_secpass_permissions(
                p,
                uid=os.getuid(),
                gid=portage.data.portage_gid,
                mode=0o770,
                stat_cached=st,
            )

        return (input_fifo, output_fifo)
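
The per-path loop above boils down to "make sure a FIFO exists here, replacing whatever else may be in the way". A condensed sketch of just that part (ensure_fifo is hypothetical; the apply_secpass_permissions() call is Portage-specific and omitted):

import os
import stat


def ensure_fifo(path):
    """Ensure path is a FIFO, creating or recreating it as needed.
    Returns the lstat result when an existing FIFO was kept, else None."""
    try:
        st = os.lstat(path)
    except OSError:
        os.mkfifo(path)
        return None
    if stat.S_ISFIFO(st.st_mode):
        return st
    # Something else occupies the path: remove it and create the FIFO.
    try:
        os.unlink(path)
    except OSError:
        pass
    os.mkfifo(path)
    return None
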
Example #8
	def _init_ipc_fifos(self):

		input_fifo = os.path.join(
			self.settings['PORTAGE_BUILDDIR'], '.ipc_in')
		output_fifo = os.path.join(
			self.settings['PORTAGE_BUILDDIR'], '.ipc_out')

		for p in (input_fifo, output_fifo):

			st = None
			try:
				st = os.lstat(p)
			except OSError:
				os.mkfifo(p)
			else:
				if not stat.S_ISFIFO(st.st_mode):
					st = None
					try:
						os.unlink(p)
					except OSError:
						pass
					os.mkfifo(p)

			apply_secpass_permissions(p,
				uid=os.getuid(),
				gid=portage.data.portage_gid,
				mode=0o770, stat_cached=st)

		return (input_fifo, output_fifo)
Example #9
	def priming_commit(self, myupdates, myremoved, commitmessage):
		myfiles = myupdates + myremoved
		fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
		mymsg = os.fdopen(fd, "wb")
		mymsg.write(_unicode_encode(commitmessage))
		mymsg.close()

		separator = '-' * 78

		print()
		print(green("Using commit message:"))
		print(green(separator))
		print(commitmessage)
		print(green(separator))
		print()

		# Having a leading ./ prefix on file paths can trigger a bug in
		# the cvs server when committing files to multiple directories,
		# so strip the prefix.
		myfiles = [f.lstrip("./") for f in myfiles]

		retval = self.vcs_settings.changes.commit(myfiles, commitmessagefile)
		# cleanup the commit message before possibly exiting
		try:
			os.unlink(commitmessagefile)
		except OSError:
			pass
		if retval != os.EX_OK:
			writemsg_level(
				"!!! Exiting on %s (shell) "
				"error code: %s\n" % (self.vcs_settings.vcs, retval),
				level=logging.ERROR, noiselevel=-1)
			sys.exit(retval)
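
The commit-message handling is the familiar "write to a mkstemp file, hand the path to an external command, remove the file afterwards" pattern. A sketch of the same flow with a plain subprocess call standing in for vcs_settings.changes.commit() (commit_with_message and the git invocation are illustrative, and the cleanup is wrapped in finally here for robustness):

import os
import subprocess
import tempfile


def commit_with_message(message, files):
    """Write message to a temporary file, pass it to `git commit -F`,
    and always remove the file. Returns the command's exit status."""
    fd, msg_path = tempfile.mkstemp(".commit.msg")
    try:
        with os.fdopen(fd, "wb") as f:
            f.write(message.encode("utf-8"))
        return subprocess.call(
            ["git", "commit", "-F", msg_path, "--"] + list(files))
    finally:
        try:
            os.unlink(msg_path)
        except OSError:
            pass
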
Example #10
	def _testPipeLogger(self, test_string):

		producer = PopenProcess(proc=subprocess.Popen(
			["bash", "-c", self._echo_cmd % test_string],
			stdout=subprocess.PIPE, stderr=subprocess.STDOUT),
			scheduler=global_event_loop())

		fd, log_file_path = tempfile.mkstemp()
		try:

			consumer = PipeLogger(background=True,
				input_fd=producer.proc.stdout,
				log_file_path=log_file_path)

			producer.pipe_reader = consumer

			producer.start()
			producer.wait()

			self.assertEqual(producer.returncode, os.EX_OK)
			self.assertEqual(consumer.returncode, os.EX_OK)

			with open(log_file_path, 'rb') as f:
				content = f.read()

		finally:
			os.close(fd)
			os.unlink(log_file_path)

		return content.decode('ascii', 'replace')
Example #11
	def add_manifest(self, mymanifests, myheaders, myupdates, myremoved,
					commitmessage):
		myfiles = mymanifests[:]
		# If there are no header (SVN/CVS keywords) changes in
		# the files, this Manifest commit must include the
		# other (yet uncommitted) files.
		if not myheaders:
			myfiles += myupdates
			myfiles += myremoved
		myfiles.sort()

		fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
		mymsg = os.fdopen(fd, "wb")
		mymsg.write(_unicode_encode(commitmessage))
		mymsg.close()

		retval = self.vcs_settings.changes.commit(myfiles, commitmessagefile)
		# cleanup the commit message before possibly exiting
		try:
			os.unlink(commitmessagefile)
		except OSError:
			pass
		if retval != os.EX_OK:
			writemsg_level(
				"!!! Exiting on %s (shell) "
				"error code: %s\n" % (self.vcs_settings.vcs, retval),
				level=logging.ERROR, noiselevel=-1)
			sys.exit(retval)
Example #12
    def add_manifest(self, mymanifests, myheaders, myupdates, myremoved,
                     commitmessage):
        myfiles = mymanifests[:]
        # If there are no header (SVN/CVS keywords) changes in
        # the files, this Manifest commit must include the
        # other (yet uncommitted) files.
        if not myheaders:
            myfiles += myupdates
            myfiles += myremoved
        myfiles.sort()

        fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
        mymsg = os.fdopen(fd, "wb")
        mymsg.write(_unicode_encode(commitmessage))
        mymsg.close()

        retval = self.vcs_settings.changes.commit(myfiles, commitmessagefile)
        # cleanup the commit message before possibly exiting
        try:
            os.unlink(commitmessagefile)
        except OSError:
            pass
        if retval != os.EX_OK:
            writemsg_level("!!! Exiting on %s (shell) "
                           "error code: %s\n" %
                           (self.vcs_settings.vcs, retval),
                           level=logging.ERROR,
                           noiselevel=-1)
            sys.exit(retval)
Example #13
    def priming_commit(self, myupdates, myremoved, commitmessage):
        myfiles = myupdates + myremoved
        fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
        mymsg = os.fdopen(fd, "wb")
        mymsg.write(_unicode_encode(commitmessage))
        mymsg.close()

        separator = '-' * 78

        print()
        print(green("Using commit message:"))
        print(green(separator))
        print(commitmessage)
        print(green(separator))
        print()

        # Having a leading ./ prefix on file paths can trigger a bug in
        # the cvs server when committing files to multiple directories,
        # so strip the prefix.
        myfiles = [f.lstrip("./") for f in myfiles]

        retval = self.vcs_settings.changes.commit(myfiles, commitmessagefile)
        # cleanup the commit message before possibly exiting
        try:
            os.unlink(commitmessagefile)
        except OSError:
            pass
        if retval != os.EX_OK:
            writemsg_level("!!! Exiting on %s (shell) "
                           "error code: %s\n" %
                           (self.vcs_settings.vcs, retval),
                           level=logging.ERROR,
                           noiselevel=-1)
            sys.exit(retval)
Example #14
	def close(self):
		"""Closes the temporary file, copies permissions (if possible),
		and performs the atomic replacement via os.rename().  If the abort()
		method has been called, then the temp file is closed and removed."""
		f = object.__getattribute__(self, '_file')
		real_name = object.__getattribute__(self, '_real_name')
		if not f.closed:
			try:
				f.close()
				if not object.__getattribute__(self, '_aborted'):
					try:
						apply_stat_permissions(f.name, os.stat(real_name))
					except OperationNotPermitted:
						pass
					except FileNotFound:
						pass
					except OSError as oe: # from the above os.stat call
						if oe.errno in (errno.ENOENT, errno.EPERM):
							pass
						else:
							raise
					os.rename(f.name, real_name)
			finally:
				# Make sure we cleanup the temp file
				# even if an exception is raised.
				try:
					os.unlink(f.name)
				except OSError as oe:
					pass
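
close() performs an atomic replacement: the data was written to a temporary sibling file, permissions are carried over when possible, and os.rename() swaps it into place, with the temp file unlinked in a finally block so an abort or failure leaves nothing behind. A self-contained sketch of the same idea without the __getattribute__ proxy plumbing (atomic_replace is an illustrative name):

import os
import shutil
import tempfile


def atomic_replace(path, data):
    """Atomically replace path with data via a sibling temp file."""
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or ".",
                                    prefix=".tmp.")
    try:
        with os.fdopen(fd, "wb") as f:
            f.write(data)
        try:
            shutil.copymode(path, tmp_path)  # best-effort permission copy
        except OSError:
            pass
        os.rename(tmp_path, path)  # atomic on POSIX within one filesystem
    except BaseException:
        try:
            os.unlink(tmp_path)
        except OSError:
            pass
        raise
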
Example #15
    def _verifier_exit(self, verifier):
        if verifier is not None and self._default_exit(verifier) != os.EX_OK:
            self._async_unlock_builddir(returncode=self.returncode)
            return

        logger = self.logger
        pkg = self.pkg
        pkg_count = self.pkg_count

        if self._fetched_pkg:
            pkg_path = self._bintree.getname(
                self._bintree.inject(pkg.cpv, filename=self._fetched_pkg),
                allocate_new=False,
            )
        else:
            pkg_path = self.pkg.root_config.trees["bintree"].getname(self.pkg.cpv)

        # This gives bashrc users an opportunity to do various things
        # such as remove binary packages after they're installed.
        if pkg_path is not None:
            self.settings["PORTAGE_BINPKG_FILE"] = pkg_path
        self._pkg_path = pkg_path

        logfile = self.settings.get("PORTAGE_LOG_FILE")
        if logfile is not None and os.path.isfile(logfile):
            # Remove fetch log after successful fetch.
            try:
                os.unlink(logfile)
            except OSError:
                pass

        if self.opts.fetchonly:
            self._current_task = None
            self.returncode = os.EX_OK
            self.wait()
            return

        msg = " === (%s of %s) Merging Binary (%s::%s)" % (
            pkg_count.curval,
            pkg_count.maxval,
            pkg.cpv,
            pkg_path,
        )
        short_msg = "emerge: (%s of %s) %s Merge Binary" % (
            pkg_count.curval,
            pkg_count.maxval,
            pkg.cpv,
        )
        logger.log(msg, short_msg=short_msg)

        phase = "clean"
        settings = self.settings
        ebuild_phase = EbuildPhase(
            background=self.background,
            phase=phase,
            scheduler=self.scheduler,
            settings=settings,
        )

        self._start_task(ebuild_phase, self._clean_exit)
Example #16
def _prepare_fake_distdir(settings, alist):
	orig_distdir = settings["DISTDIR"]
	edpath = os.path.join(settings["PORTAGE_BUILDDIR"], "distdir")
	portage.util.ensure_dirs(edpath, gid=portage_gid, mode=0o755)

	# Remove any unexpected files or directories.
	for x in os.listdir(edpath):
		symlink_path = os.path.join(edpath, x)
		st = os.lstat(symlink_path)
		if x in alist and stat.S_ISLNK(st.st_mode):
			continue
		if stat.S_ISDIR(st.st_mode):
			shutil.rmtree(symlink_path)
		else:
			os.unlink(symlink_path)

	# Check for existing symlinks and recreate if necessary.
	for x in alist:
		symlink_path = os.path.join(edpath, x)
		target = os.path.join(orig_distdir, x)
		try:
			link_target = os.readlink(symlink_path)
		except OSError:
			os.symlink(target, symlink_path)
		else:
			if link_target != target:
				os.unlink(symlink_path)
				os.symlink(target, symlink_path)
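
The helper keeps a directory of symlinks in sync with a whitelist: unexpected entries are removed, missing links are created, and links pointing at the wrong target are recreated. The same maintenance logic, generalized to an arbitrary name-to-target mapping (sync_symlink_farm is a hypothetical helper, not part of Portage):

import os
import shutil
import stat


def sync_symlink_farm(link_dir, targets):
    """Make link_dir contain exactly one symlink per (name, target) in
    the targets mapping, removing anything else found there."""
    os.makedirs(link_dir, exist_ok=True)

    # Drop entries that are unwanted or are not symlinks.
    for name in os.listdir(link_dir):
        path = os.path.join(link_dir, name)
        st = os.lstat(path)
        if name in targets and stat.S_ISLNK(st.st_mode):
            continue
        if stat.S_ISDIR(st.st_mode):
            shutil.rmtree(path)
        else:
            os.unlink(path)

    # Create missing links and fix links with the wrong target.
    for name, target in targets.items():
        path = os.path.join(link_dir, name)
        try:
            current = os.readlink(path)
        except OSError:
            os.symlink(target, path)
        else:
            if current != target:
                os.unlink(path)
                os.symlink(target, path)
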
Example #17
    def _testPipeLogger(self, test_string):

        producer = PopenProcess(proc=subprocess.Popen(
            ["bash", "-c", self._echo_cmd % test_string],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT),
                                scheduler=global_event_loop())

        fd, log_file_path = tempfile.mkstemp()
        try:

            consumer = PipeLogger(background=True,
                                  input_fd=producer.proc.stdout,
                                  log_file_path=log_file_path)

            producer.pipe_reader = consumer

            producer.start()
            producer.wait()

            self.assertEqual(producer.returncode, os.EX_OK)
            self.assertEqual(consumer.returncode, os.EX_OK)

            with open(log_file_path, 'rb') as f:
                content = f.read()

        finally:
            os.close(fd)
            os.unlink(log_file_path)

        return content.decode('ascii', 'replace')
Example #18
	def testLogfile(self):
		logfile = None
		try:
			fd, logfile = tempfile.mkstemp()
			os.close(fd)
			null_fd = os.open('/dev/null', os.O_RDWR)
			test_string = 2 * "blah blah blah\n"
			task_scheduler = TaskScheduler()
			proc = SpawnProcess(
				args=[BASH_BINARY, "-c",
				"echo -n '%s'" % test_string],
				env={}, fd_pipes={0:sys.stdin.fileno(), 1:null_fd, 2:null_fd},
				scheduler=task_scheduler.sched_iface,
				logfile=logfile)
			task_scheduler.add(proc)
			task_scheduler.run()
			os.close(null_fd)
			f = codecs.open(_unicode_encode(logfile,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['content'], errors='strict')
			log_content = f.read()
			f.close()
			# When logging passes through a pty, this comparison will fail
			# unless the oflag terminal attributes have the termios.OPOST
			# bit disabled. Otherwise, transformations such as \n -> \r\n
			# may occur.
			self.assertEqual(test_string, log_content)
		finally:
			if logfile:
				try:
					os.unlink(logfile)
				except EnvironmentError as e:
					if e.errno != errno.ENOENT:
						raise
					del e
Example #19
	def _recycle_copier_exit(self, copier):

		self._assert_current(copier)
		if self._was_cancelled():
			self.wait()
			return

		success = True
		if copier.returncode == os.EX_OK:

			try:
				os.unlink(copier.src_path)
			except OSError as e:
				if e.errno not in (errno.ENOENT, errno.ESTALE):
					logging.error("%s unlink failed in distfiles: %s" %
						(self.distfile, e))
					success = False

		else:
			logging.error(("%s copy from distfiles "
				"to recycle failed: %s") % (self.distfile, e))
			success = False

		if success:
			self._success()
			self.returncode = os.EX_OK
		else:
			self.returncode = 1

		self._current_task = None
		self.wait()
Example #20
    def write(self, sign=False, force=False):
        """ Write Manifest instance to disk, optionally signing it. Returns
		True if the Manifest is actually written, and False if the write
		is skipped due to existing Manifest being identical."""
        rval = False
        if not self.allow_create:
            return rval
        self.checkIntegrity()
        try:
            myentries = list(self._createManifestEntries())
            update_manifest = True
            if myentries and not force:
                try:
                    f = io.open(_unicode_encode(self.getFullname(),
                                                encoding=_encodings['fs'],
                                                errors='strict'),
                                mode='r',
                                encoding=_encodings['repo.content'],
                                errors='replace')
                    oldentries = list(self._parseManifestLines(f))
                    f.close()
                    if len(oldentries) == len(myentries):
                        update_manifest = False
                        for i in range(len(oldentries)):
                            if oldentries[i] != myentries[i]:
                                update_manifest = True
                                break
                except (IOError, OSError) as e:
                    if e.errno == errno.ENOENT:
                        pass
                    else:
                        raise

            if update_manifest:
                if myentries or not (self.thin or self.allow_missing):
                    # If myentries is empty, don't write an empty manifest
                    # when thin or allow_missing is enabled. Except for
                    # thin manifests with no DIST entries, myentries is
                    # non-empty for all currently known use cases.
                    write_atomic(
                        self.getFullname(), "".join("%s\n" % _unicode(myentry)
                                                    for myentry in myentries))
                    rval = True
                else:
                    # With thin manifest, there's no need to have
                    # a Manifest file if there are no DIST entries.
                    try:
                        os.unlink(self.getFullname())
                    except OSError as e:
                        if e.errno != errno.ENOENT:
                            raise
                    rval = True

            if sign:
                self.sign()
        except (IOError, OSError) as e:
            if e.errno == errno.EACCES:
                raise PermissionDenied(str(e))
            raise
        return rval
Example #21
    def _testPipeLogger(self, test_string):

        producer = PopenProcess(proc=subprocess.Popen(
            ["bash", "-c", self._echo_cmd % test_string],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT),
                                scheduler=global_event_loop())

        fd, log_file_path = tempfile.mkstemp()
        try:

            consumer = PipeLogger(background=True,
                                  input_fd=os.dup(
                                      producer.proc.stdout.fileno()),
                                  log_file_path=log_file_path)

            # Close the stdout pipe, since we duplicated it, and it
            # must be closed in order to avoid a ResourceWarning.
            producer.proc.stdout.close()
            producer.pipe_reader = consumer

            producer.start()
            producer.wait()

            self.assertEqual(producer.returncode, os.EX_OK)
            self.assertEqual(consumer.returncode, os.EX_OK)

            with open(log_file_path, 'rb') as f:
                content = f.read()

        finally:
            os.close(fd)
            os.unlink(log_file_path)

        return content.decode('ascii', 'replace')
Example #22
	def _hardlink_atomic(self, src, dest, dir_info):

		head, tail = os.path.split(dest)
		hardlink_tmp = os.path.join(head, ".%s._mirrordist_hardlink_.%s" % \
			(tail, os.getpid()))

		try:
			try:
				os.link(src, hardlink_tmp)
			except OSError as e:
				if e.errno != errno.EXDEV:
					msg = "hardlink %s from %s failed: %s" % \
						(self.distfile, dir_info, e)
					self.scheduler.output(msg + '\n', background=True,
						log_path=self._log_path)
					logging.error(msg)
				return False

			try:
				os.rename(hardlink_tmp, dest)
			except OSError as e:
				msg = "hardlink rename '%s' from %s failed: %s" % \
					(self.distfile, dir_info, e)
				self.scheduler.output(msg + '\n', background=True,
					log_path=self._log_path)
				logging.error(msg)
				return False
		finally:
			try:
				os.unlink(hardlink_tmp)
			except OSError:
				pass

		return True
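
The hard link is made atomic by linking to a unique temporary name first and renaming it over the destination; the temporary name is unlinked in a finally block, which only matters if something went wrong. A condensed sketch that returns False on cross-device links and re-raises other errors, which is slightly stricter than the method above (hardlink_atomic is an illustrative name):

import errno
import os


def hardlink_atomic(src, dest):
    """Atomically make dest a hard link to src. Returns False when src
    and dest live on different filesystems (EXDEV)."""
    tmp = os.path.join(os.path.dirname(dest) or ".",
                       ".%s.%d._hardlink_" % (os.path.basename(dest), os.getpid()))
    try:
        try:
            os.link(src, tmp)
        except OSError as e:
            if e.errno == errno.EXDEV:
                return False  # caller should fall back to copying
            raise
        os.rename(tmp, dest)
        return True
    finally:
        # The temporary name only survives if something failed above.
        try:
            os.unlink(tmp)
        except OSError:
            pass
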
Example #23
    def _success(self):
        if not self._previously_added:
            size = self.digests["size"]
            self.config.added_byte_count += size
            self.config.added_file_count += 1
            self.config.log_success("%s\t%s\tadded %i bytes" %
                                    (self.cpv, self.distfile, size))

        if self._log_path is not None:
            if not self.config.options.dry_run:
                try:
                    os.unlink(self._log_path)
                except OSError:
                    pass

        if self.config.options.recycle_dir is not None:

            recycle_file = os.path.join(self.config.options.recycle_dir,
                                        self.distfile)

            if self.config.options.dry_run:
                if os.path.exists(recycle_file):
                    logging.info("dry-run: delete '%s' from recycle" %
                                 (self.distfile, ))
            else:
                try:
                    os.unlink(recycle_file)
                except OSError:
                    pass
                else:
                    logging.debug("delete '%s' from recycle" %
                                  (self.distfile, ))
Example #24
	def _success(self):
		if not self._previously_added:
			size = self.digests["size"]
			self.config.added_byte_count += size
			self.config.added_file_count += 1
			self.config.log_success("%s\t%s\tadded %i bytes" %
				(self.cpv, self.distfile, size))

		if self._log_path is not None:
			if not self.config.options.dry_run:
				try:
					os.unlink(self._log_path)
				except OSError:
					pass

		if self.config.options.recycle_dir is not None:

			recycle_file = os.path.join(
				self.config.options.recycle_dir, self.distfile)

			if self.config.options.dry_run:
				if os.path.exists(recycle_file):
					logging.info("dry-run: delete '%s' from recycle" %
						(self.distfile,))
			else:
				try:
					os.unlink(recycle_file)
				except OSError:
					pass
				else:
					logging.debug("delete '%s' from recycle" %
						(self.distfile,))
Example #25
	def _fetch_copier_exit(self, copier):

		self._assert_current(copier)

		try:
			os.unlink(self._fetch_tmp_file)
		except OSError:
			pass

		if self._was_cancelled():
			self.wait()
			return

		if copier.returncode == os.EX_OK:
			self._success()
			self.returncode = os.EX_OK
			self.wait()
		else:
			# out of space?
			msg = "%s %s copy failed unexpectedly" % \
				(self.distfile, self._fetch_tmp_dir_info)
			self.scheduler.output(msg + '\n', background=True,
				log_path=self._log_path)
			logging.error(msg)
			self.config.log_failure("%s\t%s\t%s" %
				(self.cpv, self.distfile, msg))
			self.config.file_failures[self.distfile] = self.cpv
			self.returncode = 1
			self.wait()
Example #26
    def _recycle_copier_exit(self, copier):

        self._assert_current(copier)
        if self._was_cancelled():
            self.wait()
            return

        success = True
        if copier.returncode == os.EX_OK:

            try:
                os.unlink(copier.src_path)
            except OSError as e:
                if e.errno not in (errno.ENOENT, errno.ESTALE):
                    logging.error("%s unlink failed in distfiles: %s" %
                                  (self.distfile, e))
                    success = False

        else:
            logging.error(("%s copy from distfiles "
                           "to recycle failed: %s") % (self.distfile, e))
            success = False

        if success:
            self._success()
            self.returncode = os.EX_OK
        else:
            self.returncode = 1

        self._current_task = None
        self.wait()
Example #27
def perform_checksum(filename, hashname="MD5", calc_prelink=0):
	"""
	Run a specific checksum against a file. The filename can
	be either unicode or an encoded byte string. If filename
	is unicode then a UnicodeDecodeError will be raised if
	necessary.

	@param filename: File to run the checksum against
	@type filename: String
	@param hashname: The type of hash function to run
	@type hashname: String
	@param calc_prelink: Whether or not to reverse prelink before running the checksum
	@type calc_prelink: Integer
	@rtype: Tuple
	@return: The hash and size of the data
	"""
	global prelink_capable
	# Make sure filename is encoded with the correct encoding before
	# it is passed to spawn (for prelink) and/or the hash function.
	filename = _unicode_encode(filename,
		encoding=_encodings['fs'], errors='strict')
	myfilename = filename
	prelink_tmpfile = None
	try:
		if (calc_prelink and prelink_capable and
		    is_prelinkable_elf(filename)):
			# Create non-prelinked temporary file to checksum.
			# Files rejected by prelink are summed in place.
			try:
				tmpfile_fd, prelink_tmpfile = tempfile.mkstemp()
				try:
					retval = portage.process.spawn([PRELINK_BINARY,
						"--verify", filename], fd_pipes={1:tmpfile_fd})
				finally:
					os.close(tmpfile_fd)
				if retval == os.EX_OK:
					myfilename = prelink_tmpfile
			except portage.exception.CommandNotFound:
				# This happens during uninstallation of prelink.
				prelink_capable = False
		try:
			if hashname not in hashfunc_keys:
				raise portage.exception.DigestException(hashname + \
					" hash function not available (needs dev-python/pycrypto)")
			myhash, mysize = hashfunc_map[hashname].checksum_file(myfilename)
		except (OSError, IOError) as e:
			if e.errno in (errno.ENOENT, errno.ESTALE):
				raise portage.exception.FileNotFound(myfilename)
			elif e.errno == portage.exception.PermissionDenied.errno:
				raise portage.exception.PermissionDenied(myfilename)
			raise
		return myhash, mysize
	finally:
		if prelink_tmpfile:
			try:
				os.unlink(prelink_tmpfile)
			except OSError as e:
				if e.errno != errno.ENOENT:
					raise
				del e
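
perform_checksum() routes the output of prelink --verify into a mkstemp file through fd_pipes and checksums that copy, removing it when done. Using only the standard library, the redirect-to-a-temp-file idea looks roughly like this (capture_command_output is illustrative and reads the result back instead of checksumming it):

import os
import subprocess
import tempfile


def capture_command_output(argv):
    """Run argv with stdout redirected into a temporary file, return the
    captured bytes, and always remove the temporary file."""
    fd, tmp_path = tempfile.mkstemp()
    try:
        try:
            subprocess.check_call(argv, stdout=fd)
        finally:
            os.close(fd)  # close before reading the file back
        with open(tmp_path, "rb") as f:
            return f.read()
    finally:
        try:
            os.unlink(tmp_path)
        except OSError:
            pass


print(capture_command_output(["echo", "hello"]))  # b'hello\n'
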
Example #28
def _archive_copy(src_st, src_path, dest_path):
    """
    Copy file from src_path to dest_path. Regular files and symlinks
    are supported. If an EnvironmentError occurs, then it is logged
    to stderr.

    @param src_st: source file lstat result
    @type src_st: posix.stat_result
    @param src_path: source file path
    @type src_path: str
    @param dest_path: destination file path
    @type dest_path: str
    """
    # Remove destination file in order to ensure that the following
    # symlink or copy2 call won't fail (see bug #535850).
    try:
        os.unlink(dest_path)
    except OSError:
        pass
    try:
        if stat.S_ISLNK(src_st.st_mode):
            os.symlink(os.readlink(src_path), dest_path)
        else:
            shutil.copy2(src_path, dest_path)
    except EnvironmentError as e:
        portage.util.writemsg(
            _("dispatch-conf: Error copying %(src_path)s to "
              "%(dest_path)s: %(reason)s\n") % {
                  "src_path": src_path,
                  "dest_path": dest_path,
                  "reason": e
              },
            noiselevel=-1,
        )
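
A call site passes the os.lstat() result of the source so the S_ISLNK check can preserve symlinks as symlinks; a hypothetical invocation (both paths are made up for illustration):

import os

src = "/etc/portage/make.conf"          # hypothetical source
dst = "/var/tmp/make.conf.bak"          # hypothetical destination
_archive_copy(os.lstat(src), src, dst)  # dst is unlinked first, then copied
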
Example #29
	def close(self):
		"""Closes the temporary file, copies permissions (if possible),
		and performs the atomic replacement via os.rename().  If the abort()
		method has been called, then the temp file is closed and removed."""
		f = object.__getattribute__(self, '_file')
		real_name = object.__getattribute__(self, '_real_name')
		if not f.closed:
			try:
				f.close()
				if not object.__getattribute__(self, '_aborted'):
					try:
						apply_stat_permissions(f.name, os.stat(real_name))
					except OperationNotPermitted:
						pass
					except FileNotFound:
						pass
					except OSError as oe: # from the above os.stat call
						if oe.errno in (errno.ENOENT, errno.EPERM):
							pass
						else:
							raise
					os.rename(f.name, real_name)
			finally:
				# Make sure we cleanup the temp file
				# even if an exception is raised.
				try:
					os.unlink(f.name)
				except OSError as oe:
					pass
Example #30
def perform_checksum(filename, hashname="MD5", calc_prelink=0):
	"""
	Run a specific checksum against a file. The filename can
	be either unicode or an encoded byte string. If filename
	is unicode then a UnicodeDecodeError will be raised if
	necessary.

	@param filename: File to run the checksum against
	@type filename: String
	@param hashname: The type of hash function to run
	@type hashname: String
	@param calc_prelink: Whether or not to reverse prelink before running the checksum
	@type calc_prelink: Integer
	@rtype: Tuple
	@return: The hash and size of the data
	"""
	global prelink_capable
	# Make sure filename is encoded with the correct encoding before
	# it is passed to spawn (for prelink) and/or the hash function.
	filename = _unicode_encode(filename,
		encoding=_encodings['fs'], errors='strict')
	myfilename = filename
	prelink_tmpfile = None
	try:
		if (calc_prelink and prelink_capable and
		    is_prelinkable_elf(filename)):
			# Create non-prelinked temporary file to checksum.
			# Files rejected by prelink are summed in place.
			try:
				tmpfile_fd, prelink_tmpfile = tempfile.mkstemp()
				try:
					retval = portage.process.spawn([PRELINK_BINARY,
						"--verify", filename], fd_pipes={1:tmpfile_fd})
				finally:
					os.close(tmpfile_fd)
				if retval == os.EX_OK:
					myfilename = prelink_tmpfile
			except portage.exception.CommandNotFound:
				# This happens during uninstallation of prelink.
				prelink_capable = False
		try:
			if hashname not in hashfunc_map:
				raise portage.exception.DigestException(hashname + \
					" hash function not available (needs dev-python/pycrypto)")
			myhash, mysize = hashfunc_map[hashname](myfilename)
		except (OSError, IOError) as e:
			if e.errno in (errno.ENOENT, errno.ESTALE):
				raise portage.exception.FileNotFound(myfilename)
			elif e.errno == portage.exception.PermissionDenied.errno:
				raise portage.exception.PermissionDenied(myfilename)
			raise
		return myhash, mysize
	finally:
		if prelink_tmpfile:
			try:
				os.unlink(prelink_tmpfile)
			except OSError as e:
				if e.errno != errno.ENOENT:
					raise
				del e
Example #31
def _archive_copy(src_st, src_path, dest_path):
	"""
	Copy file from src_path to dest_path. Regular files and symlinks
	are supported. If an EnvironmentError occurs, then it is logged
	to stderr.

	@param src_st: source file lstat result
	@type src_st: posix.stat_result
	@param src_path: source file path
	@type src_path: str
	@param dest_path: destination file path
	@type dest_path: str
	"""
	# Remove destination file in order to ensure that the following
	# symlink or copy2 call won't fail (see bug #535850).
	try:
		os.unlink(dest_path)
	except OSError:
		pass
	try:
		if stat.S_ISLNK(src_st.st_mode):
			os.symlink(os.readlink(src_path), dest_path)
		else:
			shutil.copy2(src_path, dest_path)
	except EnvironmentError as e:
		portage.util.writemsg(
			_('dispatch-conf: Error copying %(src_path)s to '
			'%(dest_path)s: %(reason)s\n') % {
				"src_path": src_path,
				"dest_path": dest_path,
				"reason": e
			}, noiselevel=-1)
Example #32
    def _package_phase_exit(self, package_phase):

        self.settings.pop("PORTAGE_BINPKG_TMPFILE", None)
        self.settings.pop("PORTAGE_DEBUGSYMBOLS_TMPFILE", None)
        if self._default_exit(package_phase) != os.EX_OK:
            try:
                os.unlink(self._binpkg_tmpfile)
            except OSError:
                pass
            try:
                os.unlink(self._symbols_tmpfile)
            except OSError:
                pass
            self.wait()
            return

        pkg = self.pkg
        if self._symbols_tmpfile and os.path.isfile(self._symbols_tmpfile):
            xpak_data = xpak.xpak_mem({
                "CATEGORY": pkg.category,
                "PF": pkg.pf + "-debug",
                "SLOT": pkg._metadata['SLOT']
            })
            xpak.tbz2(self._symbols_tmpfile).recompose_mem(xpak_data)
        bintree = pkg.root_config.trees["bintree"]
        bintree.inject(pkg.cpv, filename=self._binpkg_tmpfile)

        self._current_task = None
        self.returncode = os.EX_OK
        self.wait()
Example #33
    def _fetch_copier_exit(self, copier):

        self._assert_current(copier)

        try:
            os.unlink(self._fetch_tmp_file)
        except OSError:
            pass

        if self._was_cancelled():
            self.wait()
            return

        if copier.returncode == os.EX_OK:
            self._success()
            self.returncode = os.EX_OK
            self.wait()
        else:
            # out of space?
            msg = "%s %s copy failed unexpectedly" % \
             (self.distfile, self._fetch_tmp_dir_info)
            self.scheduler.output(msg + '\n',
                                  background=True,
                                  log_path=self._log_path)
            logging.error(msg)
            self.config.log_failure("%s\t%s\t%s" %
                                    (self.cpv, self.distfile, msg))
            self.config.file_failures[self.distfile] = self.cpv
            self.returncode = 1
            self.wait()
Example #34
def collect_ebuild_messages(path):
	""" Collect elog messages generated by the bash logging function stored 
		at 'path'.
	"""
	mylogfiles = None
	try:
		mylogfiles = os.listdir(path)
	except OSError:
		pass
	# shortcut for packages without any messages
	if not mylogfiles:
		return {}
	# exploit listdir() file order so we process log entries in chronological order
	mylogfiles.reverse()
	logentries = {}
	for msgfunction in mylogfiles:
		filename = os.path.join(path, msgfunction)
		if msgfunction not in EBUILD_PHASES:
			writemsg(_("!!! can't process invalid log file: %s\n") % filename,
				noiselevel=-1)
			continue
		if not msgfunction in logentries:
			logentries[msgfunction] = []
		lastmsgtype = None
		msgcontent = []
		f = io.open(_unicode_encode(filename,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['repo.content'], errors='replace')
		for l in f:
			l = l.rstrip('\n')
			if not l:
				continue
			try:
				msgtype, msg = l.split(" ", 1)
			except ValueError:
				writemsg(_("!!! malformed entry in "
					"log file: '%s'\n") % filename, noiselevel=-1)
				continue

			if lastmsgtype is None:
				lastmsgtype = msgtype
			
			if msgtype == lastmsgtype:
				msgcontent.append(msg)
			else:
				if msgcontent:
					logentries[msgfunction].append((lastmsgtype, msgcontent))
				msgcontent = [msg]
			lastmsgtype = msgtype
		f.close()
		if msgcontent:
			logentries[msgfunction].append((lastmsgtype, msgcontent))

	# clean logfiles to avoid repetitions
	for f in mylogfiles:
		try:
			os.unlink(os.path.join(path, f))
		except OSError:
			pass
	return logentries
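
The inner loop parses lines of the form "TYPE message" and groups consecutive lines of the same type into one entry. That parsing step can be isolated as a small pure function for clarity (group_log_lines is illustrative, not part of Portage, and it silently skips malformed lines that the real code reports):

def group_log_lines(lines):
    """Group consecutive 'TYPE message' lines into (type, [messages])
    pairs, mirroring the per-file loop in collect_ebuild_messages()."""
    entries = []
    last_type = None
    content = []
    for line in lines:
        line = line.rstrip("\n")
        if not line:
            continue
        try:
            msg_type, msg = line.split(" ", 1)
        except ValueError:
            continue  # malformed entry, skipped here
        if last_type is None or msg_type == last_type:
            content.append(msg)
        else:
            entries.append((last_type, content))
            content = [msg]
        last_type = msg_type
    if content:
        entries.append((last_type, content))
    return entries


# [('INFO', ['starting', 'still going']), ('WARN', ['careful'])]
print(group_log_lines(["INFO starting\n", "INFO still going\n", "WARN careful\n"]))
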
Example #35
    def clean_log(self):
        """Discard existing log."""
        settings = self.settings
        log_file = settings.get('PORTAGE_LOG_FILE')
        if log_file is not None and os.path.isfile(log_file):
            try:
                os.unlink(log_file)
            except OSError:
                pass
Example #36
    def _fetch_uri(self, uri):

        if self.config.options.dry_run:
            # Simply report success.
            logging.info("dry-run: fetch '%s' from '%s'" %
                         (self.distfile, uri))
            self._success()
            self.returncode = os.EX_OK
            self._async_wait()
            return

        if self.config.options.temp_dir:
            self._fetch_tmp_dir_info = "temp-dir"
            distdir = self.config.options.temp_dir
        else:
            self._fetch_tmp_dir_info = "distfiles"
            distdir = self.config.options.distfiles

        tmp_basename = self.distfile + "._emirrordist_fetch_.%s" % portage.getpid()

        variables = {"DISTDIR": distdir, "URI": uri, "FILE": tmp_basename}

        self._fetch_tmp_file = os.path.join(distdir, tmp_basename)

        try:
            os.unlink(self._fetch_tmp_file)
        except OSError:
            pass

        args = portage.util.shlex_split(default_fetchcommand)
        args = [portage.util.varexpand(x, mydict=variables) for x in args]

        args = [
            _unicode_encode(x, encoding=_encodings["fs"], errors="strict")
            for x in args
        ]

        null_fd = os.open(os.devnull, os.O_RDONLY)
        fetcher = PopenProcess(
            background=self.background,
            proc=subprocess.Popen(args,
                                  stdin=null_fd,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT),
            scheduler=self.scheduler,
        )
        os.close(null_fd)

        fetcher.pipe_reader = PipeLogger(
            background=self.background,
            input_fd=fetcher.proc.stdout,
            log_file_path=self._log_path,
            scheduler=self.scheduler,
        )

        self._start_task(fetcher, self._fetcher_exit)
Example #37
    def clean_log(self):
        """Discard existing log."""
        settings = self.settings
        log_file = settings.get("PORTAGE_LOG_FILE")
        if log_file is not None and os.path.isfile(log_file):
            try:
                os.unlink(log_file)
            except OSError:
                pass
Example #38
def collect_ebuild_messages(path):
	""" Collect elog messages generated by the bash logging function stored 
		at 'path'.
	"""
	mylogfiles = None
	try:
		mylogfiles = os.listdir(path)
	except OSError:
		pass
	# shortcut for packages without any messages
	if not mylogfiles:
		return {}
	# exploit listdir() file order so we process log entries in chronological order
	mylogfiles.reverse()
	logentries = {}
	for msgfunction in mylogfiles:
		filename = os.path.join(path, msgfunction)
		if msgfunction not in EBUILD_PHASES:
			writemsg(_("!!! can't process invalid log file: %s\n") % filename,
				noiselevel=-1)
			continue
		if not msgfunction in logentries:
			logentries[msgfunction] = []
		lastmsgtype = None
		msgcontent = []
		for l in codecs.open(_unicode_encode(filename,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['repo.content'], errors='replace'):
			if not l:
				continue
			try:
				msgtype, msg = l.split(" ", 1)
			except ValueError:
				writemsg(_("!!! malformed entry in "
					"log file: '%s'\n") % filename, noiselevel=-1)
				continue

			if lastmsgtype is None:
				lastmsgtype = msgtype
			
			if msgtype == lastmsgtype:
				msgcontent.append(msg)
			else:
				if msgcontent:
					logentries[msgfunction].append((lastmsgtype, msgcontent))
				msgcontent = [msg]
			lastmsgtype = msgtype
		if msgcontent:
			logentries[msgfunction].append((lastmsgtype, msgcontent))

	# clean logfiles to avoid repetitions
	for f in mylogfiles:
		try:
			os.unlink(os.path.join(path, f))
		except OSError:
			pass
	return logentries
Example #39
    def _fetch_digester_exit(self, digester):

        self._assert_current(digester)
        if self._was_cancelled():
            self.wait()
            return

        if digester.returncode != os.EX_OK:
            msg = "%s %s digester failed unexpectedly" % (
                self.distfile,
                self._fetch_tmp_dir_info,
            )
            self.scheduler.output(msg + "\n",
                                  background=True,
                                  log_path=self._log_path)
            logging.error(msg)
        else:
            bad_digest = self._find_bad_digest(digester.digests)
            if bad_digest is not None:
                msg = "%s has bad %s digest: expected %s, got %s" % (
                    self.distfile,
                    bad_digest,
                    self.digests[bad_digest],
                    digester.digests[bad_digest],
                )
                self.scheduler.output(msg + "\n",
                                      background=True,
                                      log_path=self._log_path)
                try:
                    os.unlink(self._fetch_tmp_file)
                except OSError:
                    pass
            else:
                dest = os.path.join(
                    self.config.options.distfiles,
                    self.config.layouts[0].get_path(self.distfile),
                )
                ensure_dirs(os.path.dirname(dest))
                try:
                    os.rename(self._fetch_tmp_file, dest)
                except OSError:
                    self._start_task(
                        FileCopier(
                            src_path=self._fetch_tmp_file,
                            dest_path=dest,
                            background=(self.background
                                        and self._log_path is not None),
                            logfile=self._log_path,
                        ),
                        self._fetch_copier_exit,
                    )
                    return
                else:
                    self._make_layout_links()
                    return

        self._try_next_mirror()
Example #40
    def _start(self):

        distfile_path = os.path.join(self.config.options.distfiles,
                                     self.distfile)

        if self.config.options.recycle_dir is not None:
            distfile_path = os.path.join(self.config.options.distfiles,
                                         self.distfile)
            recycle_path = os.path.join(self.config.options.recycle_dir,
                                        self.distfile)
            if self.config.options.dry_run:
                logging.info(("dry-run: move '%s' from "
                              "distfiles to recycle") % self.distfile)
            else:
                logging.debug(("move '%s' from "
                               "distfiles to recycle") % self.distfile)
                try:
                    os.rename(distfile_path, recycle_path)
                except OSError as e:
                    if e.errno != errno.EXDEV:
                        logging.error(
                            ("rename %s from distfiles to "
                             "recycle failed: %s") % (self.distfile, e))
                else:
                    self.returncode = os.EX_OK
                    self._async_wait()
                    return

                self._start_task(
                    FileCopier(src_path=distfile_path,
                               dest_path=recycle_path,
                               background=False), self._recycle_copier_exit)
                return

        success = True

        if self.config.options.dry_run:
            logging.info(("dry-run: delete '%s' from "
                          "distfiles") % self.distfile)
        else:
            logging.debug(("delete '%s' from " "distfiles") % self.distfile)
            try:
                os.unlink(distfile_path)
            except OSError as e:
                if e.errno not in (errno.ENOENT, errno.ESTALE):
                    logging.error("%s unlink failed in distfiles: %s" %
                                  (self.distfile, e))
                    success = False

        if success:
            self._success()
            self.returncode = os.EX_OK
        else:
            self.returncode = 1

        self._async_wait()
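
Moving the distfile into the recycle directory first tries os.rename() and falls back to a copy when the two directories sit on different filesystems (EXDEV). A synchronous sketch of that fallback (move_across_filesystems is illustrative; the task above uses an asynchronous FileCopier so the event loop is never blocked, and shutil.move() already provides this behavior for ordinary scripts):

import errno
import os
import shutil


def move_across_filesystems(src, dst):
    """Move src to dst, copying and unlinking when a plain rename is not
    possible because the paths are on different filesystems."""
    try:
        os.rename(src, dst)
        return
    except OSError as e:
        if e.errno != errno.EXDEV:
            raise
    # Cross-device move: copy the data and metadata, then remove the original.
    shutil.copy2(src, dst)
    os.unlink(src)
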
Example #41
	def _start(self):

		distfile_path = os.path.join(
			self.config.options.distfiles, self.distfile)

		if self.config.options.recycle_dir is not None:
			distfile_path = os.path.join(self.config.options.distfiles, self.distfile)
			recycle_path = os.path.join(
				self.config.options.recycle_dir, self.distfile)
			if self.config.options.dry_run:
				logging.info(("dry-run: move '%s' from "
					"distfiles to recycle") % self.distfile)
			else:
				logging.debug(("move '%s' from "
					"distfiles to recycle") % self.distfile)
				try:
					os.rename(distfile_path, recycle_path)
				except OSError as e:
					if e.errno != errno.EXDEV:
						logging.error(("rename %s from distfiles to "
							"recycle failed: %s") % (self.distfile, e))
				else:
					self.returncode = os.EX_OK
					self._async_wait()
					return

				self._start_task(
					FileCopier(src_path=distfile_path,
						dest_path=recycle_path,
						background=False),
					self._recycle_copier_exit)
				return

		success = True

		if self.config.options.dry_run:
			logging.info(("dry-run: delete '%s' from "
				"distfiles") % self.distfile)
		else:
			logging.debug(("delete '%s' from "
				"distfiles") % self.distfile)
			try:
				os.unlink(distfile_path)
			except OSError as e:
				if e.errno not in (errno.ENOENT, errno.ESTALE):
					logging.error("%s unlink failed in distfiles: %s" %
						(self.distfile, e))
					success = False

		if success:
			self._success()
			self.returncode = os.EX_OK
		else:
			self.returncode = 1

		self._async_wait()
Example #42
    def _start(self):
        """
		Note: Unlike a normal AsynchronousTask.start() method,
		this one does all of its work synchronously. The returncode
		attribute will be set before it returns.
		"""

        pkg = self.pkg
        root_config = pkg.root_config
        bintree = root_config.trees["bintree"]
        rval = os.EX_OK
        stdout_orig = sys.stdout
        stderr_orig = sys.stderr
        out = portage.StringIO()
        try:
            sys.stdout = out
            sys.stderr = out
            try:
                bintree.digestCheck(pkg)
            except portage.exception.FileNotFound:
                writemsg("!!! Fetching Binary failed " + \
                 "for '%s'\n" % pkg.cpv, noiselevel=-1)
                rval = 1
            except portage.exception.DigestException as e:
                writemsg("\n!!! Digest verification failed:\n", noiselevel=-1)
                writemsg("!!! %s\n" % e.value[0], noiselevel=-1)
                writemsg("!!! Reason: %s\n" % e.value[1], noiselevel=-1)
                writemsg("!!! Got: %s\n" % e.value[2], noiselevel=-1)
                writemsg("!!! Expected: %s\n" % e.value[3], noiselevel=-1)
                rval = 1
            if rval == os.EX_OK:
                # If this was successful, discard the log here since otherwise
                # we'll get multiple logs for the same package.
                if self.logfile is not None:
                    try:
                        os.unlink(self.logfile)
                    except OSError:
                        pass
            else:
                pkg_path = bintree.getname(pkg.cpv)
                head, tail = os.path.split(pkg_path)
                temp_filename = _checksum_failure_temp_file(head, tail)
                writemsg("File renamed to '%s'\n" % (temp_filename, ),
                         noiselevel=-1)
        finally:
            sys.stdout = stdout_orig
            sys.stderr = stderr_orig

        msg = _unicode_decode(out.getvalue(),
                              encoding=_encodings['content'],
                              errors='replace')
        if msg:
            self.scheduler.output(msg, log_path=self.logfile)

        self.returncode = rval
        self.wait()
Example #43
	def testDisplayIfKeyword(self):
		tmpItem = self.fakeItem[:].replace("#Display-If-Keyword:", "Display-If-Keyword: %s" %
			self.keywords)

		try:
			item = self._processItem(tmpItem)
			self.assertTrue(item.isRelevant(self.vardb, self.settings, self.profile),
				msg="Expected %s to be relevant, but it was not!" % tmpItem)
		finally:
			os.unlink(item.path)
Example #44
	def testDisplayIfInstalled(self):
		tmpItem = self.fakeItem[:].replace("#Display-If-Installed:", "Display-If-Installed: %s" %
			"sys-apps/portage")

		try:
			item = self._processItem(tmpItem)
			self.assertTrue(item.isRelevant(self.vardb, self.settings, self.profile),
				msg="Expected %s to be relevant, but it was not!" % tmpItem)
		finally:
			os.unlink(item.path)
Example #45
    def _install_exit(self, task):
        self.settings.pop("PORTAGE_BINPKG_FILE", None)
        self._unlock_builddir()
        if task.returncode == os.EX_OK and \
                'binpkg-logs' not in self.settings.features and \
                self.settings.get("PORTAGE_LOG_FILE"):
            try:
                os.unlink(self.settings["PORTAGE_LOG_FILE"])
            except OSError:
                pass
Example #46
	def testDisplayIfKeyword(self):
		tmpItem = self.fakeItem[:].replace("#Display-If-Keyword:", "Display-If-Keyword: %s" %
			self.keywords)

		try:
			item = self._processItem(tmpItem)
			self.assertTrue(item.isRelevant(self.vardb, self.settings, self.profile),
				msg="Expected %s to be relevant, but it was not!" % tmpItem)
		finally:
			os.unlink(item.path)
Example #47
	def testDisplayIfInstalled(self):
		tmpItem = self.fakeItem[:].replace("#Display-If-Installed:", "Display-If-Installed: %s" %
			"sys-apps/portage")

		try:
			item = self._processItem(tmpItem)
			self.assertTrue(item.isRelevant(self.vardb, self.settings, self.profile),
				msg="Expected %s to be relevant, but it was not!" % tmpItem)
		finally:
			os.unlink(item.path)
Example #48
	def _install_exit(self, task):
		self.settings.pop("PORTAGE_BINPKG_FILE", None)
		self._unlock_builddir()
		if task.returncode == os.EX_OK and \
			'binpkg-logs' not in self.settings.features and \
			self.settings.get("PORTAGE_LOG_FILE"):
			try:
				os.unlink(self.settings["PORTAGE_LOG_FILE"])
			except OSError:
				pass
Example #49
    def add_manifest(self, mymanifests, myheaders, myupdates, myremoved,
                     commitmessage):
        myfiles = mymanifests[:]
        # If there are no header (SVN/CVS keywords) changes in
        # the files, this Manifest commit must include the
        # other (yet uncommitted) files.
        if not myheaders:
            myfiles += myupdates
            myfiles += myremoved
        myfiles.sort()

        fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
        mymsg = os.fdopen(fd, "wb")
        mymsg.write(_unicode_encode(commitmessage))
        mymsg.close()

        commit_cmd = []
        if self.options.pretend and self.vcs_settings.vcs is None:
            # substitute a bogus value for pretend output
            commit_cmd.append("cvs")
        else:
            commit_cmd.append(self.vcs_settings.vcs)
        commit_cmd.extend(self.vcs_settings.vcs_global_opts)
        commit_cmd.append("commit")
        commit_cmd.extend(self.vcs_settings.vcs_local_opts)
        if self.vcs_settings.vcs == "hg":
            commit_cmd.extend(["--logfile", commitmessagefile])
            commit_cmd.extend(myfiles)
        else:
            commit_cmd.extend(["-F", commitmessagefile])
            commit_cmd.extend(f.lstrip("./") for f in myfiles)

        try:
            if self.options.pretend:
                print("(%s)" % (" ".join(commit_cmd), ))
            else:
                retval = spawn(commit_cmd, env=self.repo_settings.commit_env)
                if retval != os.EX_OK:
                    if self.repo_settings.repo_config.sign_commit and self.vcs_settings.vcs == 'git' and \
                     not git_supports_gpg_sign():
                        # Inform user that newer git is needed (bug #403323).
                        logging.error(
                            "Git >=1.7.9 is required for signed commits!")

                    writemsg_level("!!! Exiting on %s (shell) "
                                   "error code: %s\n" %
                                   (self.vcs_settings.vcs, retval),
                                   level=logging.ERROR,
                                   noiselevel=-1)
                    sys.exit(retval)
        finally:
            try:
                os.unlink(commitmessagefile)
            except OSError:
                pass
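
The method above hands the commit message to the VCS through a temporary file, using --logfile for hg and -F for the other supported tools, and always removes that file afterwards. A reduced, self-contained sketch of the same flow (a hypothetical helper using plain subprocess instead of portage's spawn()):

import os
import subprocess
import tempfile

def commit_with_message(files, commitmessage, vcs="git"):
    # Write the message to a temporary file so the VCS can read it via
    # -F (git/cvs/svn) or --logfile (hg), as in the method above.
    fd, msgfile = tempfile.mkstemp(".commit.msg")
    try:
        with os.fdopen(fd, "wb") as f:
            f.write(commitmessage.encode("utf-8"))
        flag = "--logfile" if vcs == "hg" else "-F"
        return subprocess.call([vcs, "commit", flag, msgfile] + list(files))
    finally:
        try:
            os.unlink(msgfile)
        except OSError:
            pass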
Ejemplo n.º 50
0
	def add_manifest(self, mymanifests, myheaders, myupdates, myremoved,
					commitmessage):
		myfiles = mymanifests[:]
		# If there are no header (SVN/CVS keywords) changes in
		# the files, this Manifest commit must include the
		# other (yet uncommitted) files.
		if not myheaders:
			myfiles += myupdates
			myfiles += myremoved
		myfiles.sort()

		fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
		mymsg = os.fdopen(fd, "wb")
		mymsg.write(_unicode_encode(commitmessage))
		mymsg.close()

		commit_cmd = []
		if self.options.pretend and self.vcs_settings.vcs is None:
			# substitute a bogus value for pretend output
			commit_cmd.append("cvs")
		else:
			commit_cmd.append(self.vcs_settings.vcs)
		commit_cmd.extend(self.vcs_settings.vcs_global_opts)
		commit_cmd.append("commit")
		commit_cmd.extend(self.vcs_settings.vcs_local_opts)
		if self.vcs_settings.vcs == "hg":
			commit_cmd.extend(["--logfile", commitmessagefile])
			commit_cmd.extend(myfiles)
		else:
			commit_cmd.extend(["-F", commitmessagefile])
			commit_cmd.extend(f.lstrip("./") for f in myfiles)

		try:
			if self.options.pretend:
				print("(%s)" % (" ".join(commit_cmd),))
			else:
				retval = spawn(commit_cmd, env=self.repo_settings.commit_env)
				if retval != os.EX_OK:
					if self.repo_settings.repo_config.sign_commit and self.vcs_settings.vcs == 'git' and \
						not git_supports_gpg_sign():
						# Inform user that newer git is needed (bug #403323).
						logging.error(
							"Git >=1.7.9 is required for signed commits!")

					writemsg_level(
						"!!! Exiting on %s (shell) "
						"error code: %s\n" % (self.vcs_settings.vcs, retval),
						level=logging.ERROR, noiselevel=-1)
					sys.exit(retval)
		finally:
			try:
				os.unlink(commitmessagefile)
			except OSError:
				pass
Ejemplo n.º 51
0
def get_commit_message_with_editor(editor, message=None):
    """
    Execute editor with a temporary file as its argument
    and return the file content afterwards.

    @param editor: An EDITOR value from the environment
    @type: string
    @param message: An iterable of lines to show in the editor.
    @type: iterable
    @rtype: string or None
    @return: A string on success or None if an error occurs.
    """
    from tempfile import mkstemp
    fd, filename = mkstemp()
    try:
        os.write(fd, _unicode_encode(
         "\n# Please enter the commit message " + \
         "for your changes.\n# (Comment lines starting " + \
         "with '#' will not be included)\n",
         encoding=_encodings['content'], errors='backslashreplace'))
        if message:
            os.write(
                fd,
                _unicode_encode("#\n",
                                encoding=_encodings['content'],
                                errors='backslashreplace'))
            for line in message:
                os.write(
                    fd,
                    _unicode_encode("#" + line,
                                    encoding=_encodings['content'],
                                    errors='backslashreplace'))
        os.close(fd)
        retval = os.system(editor + " '%s'" % filename)
        if not (os.WIFEXITED(retval) and os.WEXITSTATUS(retval) == os.EX_OK):
            return None
        try:
            mylines = codecs.open(_unicode_encode(filename,
                                                  encoding=_encodings['fs'],
                                                  errors='strict'),
                                  mode='r',
                                  encoding=_encodings['content'],
                                  errors='replace').readlines()
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
            del e
            return None
        return "".join(line for line in mylines if not line.startswith("#"))
    finally:
        try:
            os.unlink(filename)
        except OSError:
            pass
Ejemplo n.º 52
0
def unhardlink_lockfile(lockfilename, unlinkfile=True):
	myhardlock = hardlock_name(lockfilename)
	if unlinkfile and hardlink_is_mine(myhardlock, lockfilename):
		# Make sure not to touch lockfilename unless we really have a lock.
		try:
			os.unlink(lockfilename)
		except OSError:
			pass
	try:
		os.unlink(myhardlock)
	except OSError:
		pass
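
hardlock_name() and hardlink_is_mine() are defined elsewhere in portage.locks and are not shown here. As a rough, generic illustration of hardlink-based locking (an assumption-laden sketch, not portage's actual implementation), a lock is taken by hard-linking a process-unique file onto the shared lock name and then checking the link count:

import os
import socket

def hardlock_name(path):
    # Hypothetical layout: a per-host, per-process name next to the lock file.
    return "%s.hardlock-%s-%s" % (path, socket.gethostname(), os.getpid())

def try_hardlink_lock(lockfilename):
    myhardlock = hardlock_name(lockfilename)
    open(myhardlock, "w").close()           # our private link source
    try:
        os.link(myhardlock, lockfilename)   # fails if someone else holds it
    except OSError:
        pass
    try:
        # We own the lock if both names refer to the same inode with 2 links.
        return (os.stat(myhardlock).st_nlink == 2
                and os.path.samefile(myhardlock, lockfilename))
    except OSError:
        return False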
Ejemplo n.º 53
0
def _prepare_fake_filesdir(settings):
	real_filesdir = settings["O"]+"/files"
	symlink_path = settings["FILESDIR"]

	try:
		link_target = os.readlink(symlink_path)
	except OSError:
		os.symlink(real_filesdir, symlink_path)
	else:
		if link_target != real_filesdir:
			os.unlink(symlink_path)
			os.symlink(real_filesdir, symlink_path)
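
A self-contained way to exercise the helper above, using temporary directories (the paths are made up for the demonstration; real values come from the build settings):

import os
import tempfile

tmp = tempfile.mkdtemp()
pkgdir = os.path.join(tmp, "app-misc", "example")
os.makedirs(os.path.join(pkgdir, "files"))
settings = {"O": pkgdir, "FILESDIR": os.path.join(tmp, "files.symlink")}

_prepare_fake_filesdir(settings)
assert os.readlink(settings["FILESDIR"]) == os.path.join(pkgdir, "files")
# A second call is a no-op, since the existing link target already matches.
_prepare_fake_filesdir(settings)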
Ejemplo n.º 54
0
	def _unlink_file(self, file_path, dir_info):
		try:
			os.unlink(file_path)
		except OSError as e:
			if e.errno not in (errno.ENOENT, errno.ESTALE):
				msg = "unlink '%s' failed in %s: %s" % \
					(self.distfile, dir_info, e)
				self.scheduler.output(msg + '\n', background=True,
					log_path=self._log_path)
				logging.error(msg)
				return False
		return True
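
ENOENT (already removed) and ESTALE (a stale NFS handle for a file deleted on another host) are treated as success here; anything else is logged and reported to the caller. A hypothetical caller inside the same task class might use the boolean result like this:

if not self._unlink_file(file_path, "distfiles"):
    # The failure was something other than the file already being gone,
    # so stop instead of pretending the cleanup succeeded.
    self.returncode = 1
    self.wait()
    return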
Ejemplo n.º 56
0
def rcs_archive_post_process(archive):
	"""Check in the archive file with the .dist.new suffix on the branch
	and remove the one with the .dist suffix."""
	os.rename(archive + '.dist.new', archive)
	if os.path.lexists(archive + '.dist'):
		# Commit the last-distributed version onto the branch.
		os.system(RCS_LOCK + RCS_BRANCH + ' ' + archive)
		os.system(RCS_PUT + ' -r' + RCS_BRANCH + ' ' + archive)
		os.unlink(archive + '.dist')
	else:
		# Forcefully commit the last-distributed version onto the branch.
		os.system(RCS_PUT + ' -f -r' + RCS_BRANCH + ' ' + archive)
Ejemplo n.º 57
0
def get_commit_message_with_editor(editor, message=None):
    """
    Execute editor with a temporary file as its argument
    and return the file content afterwards.

    @param editor: An EDITOR value from the environment
    @type: string
    @param message: An iterable of lines to show in the editor.
    @type: iterable
    @rtype: string or None
    @return: A string on success or None if an error occurs.
    """
    fd, filename = mkstemp()
    try:
        os.write(
            fd,
            _unicode_encode(
                _(
                    "\n# Please enter the commit message "
                    + "for your changes.\n# (Comment lines starting "
                    + "with '#' will not be included)\n"
                ),
                encoding=_encodings["content"],
                errors="backslashreplace",
            ),
        )
        if message:
            os.write(fd, b"#\n")
            for line in message:
                os.write(fd, _unicode_encode("#" + line, encoding=_encodings["content"], errors="backslashreplace"))
        os.close(fd)
        retval = os.system(editor + " '%s'" % filename)
        if not (os.WIFEXITED(retval) and os.WEXITSTATUS(retval) == os.EX_OK):
            return None
        try:
            with io.open(
                _unicode_encode(filename, encoding=_encodings["fs"], errors="strict"),
                mode="r",
                encoding=_encodings["content"],
                errors="replace",
            ) as f:
                mylines = f.readlines()
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
            del e
            return None
        return "".join(line for line in mylines if not line.startswith("#"))
    finally:
        try:
            os.unlink(filename)
        except OSError:
            pass
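
A typical invocation takes the editor from the environment; the fallback editor and the example message line below are arbitrary choices for illustration:

import os

editor = os.environ.get("EDITOR", "nano")  # the fallback value is an assumption
msg = get_commit_message_with_editor(
    editor, message=["app-misc/example: bump to 1.1"])
if msg is None or not msg.strip():
    print("commit aborted: no commit message")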
Ejemplo n.º 58
0
	def _verifier_exit(self, verifier):
		if verifier is not None and \
			self._default_exit(verifier) != os.EX_OK:
			self._unlock_builddir()
			self.wait()
			return

		logger = self.logger
		pkg = self.pkg
		pkg_count = self.pkg_count

		if self._fetched_pkg:
			pkg_path = self._bintree.getname(
				self._bintree.inject(pkg.cpv,
				filename=self._fetched_pkg),
				allocate_new=False)
		else:
			pkg_path = self.pkg.root_config.trees["bintree"].getname(
				self.pkg.cpv)

		# This gives bashrc users an opportunity to do various things
		# such as remove binary packages after they're installed.
		self.settings["PORTAGE_BINPKG_FILE"] = pkg_path
		self._pkg_path = pkg_path

		logfile = self.settings.get("PORTAGE_LOG_FILE")
		if logfile is not None and os.path.isfile(logfile):
			# Remove fetch log after successful fetch.
			try:
				os.unlink(logfile)
			except OSError:
				pass

		if self.opts.fetchonly:
			self._current_task = None
			self.returncode = os.EX_OK
			self.wait()
			return

		msg = " === (%s of %s) Merging Binary (%s::%s)" % \
			(pkg_count.curval, pkg_count.maxval, pkg.cpv, pkg_path)
		short_msg = "emerge: (%s of %s) %s Merge Binary" % \
			(pkg_count.curval, pkg_count.maxval, pkg.cpv)
		logger.log(msg, short_msg=short_msg)

		phase = "clean"
		settings = self.settings
		ebuild_phase = EbuildPhase(background=self.background,
			phase=phase, scheduler=self.scheduler,
			settings=settings)

		self._start_task(ebuild_phase, self._clean_exit)