Example No. 1
 def sandbox_summary(self, move_log=False):
     """if the instance is sandboxed, print the sandbox access summary"""
     if not os.path.exists(self.__sandbox_log):
         self.write("end_sandbox_summary")
         return 0
     violations = portage_util.grabfile(self.__sandbox_log)
     if len(violations) == 0:
         self.write("end_sandbox_summary")
         return 0
     if not move_log:
         move_log = self.__sandbox_log
     elif move_log != self.__sandbox_log:
         # open the destination log for writing before copying the violations over
         myf = open(move_log, "w")
         for x in violations:
             myf.write(x + "\n")
         myf.close()
     from output import red
     self.ebd_write.write(
         red("--------------------------- ACCESS VIOLATION SUMMARY ---------------------------"
             ) + "\n")
     self.ebd_write.write(red("LOG FILE = \"%s\"" % move_log) + "\n\n")
     for x in violations:
         self.ebd_write.write(x + "\n")
     self.write(
         red("--------------------------------------------------------------------------------"
             ) + "\n")
     self.write("end_sandbox_summary")
     try:
         os.remove(self.__sandbox_log)
     except (IOError, OSError), e:
         print "exception caught when cleansing sandbox_log=%s" % str(e)
Example No. 2
    def git_merge_all(self,
                      from_branch,
                      to_branch,
                      working_path='/var/release'):
        """
        Merge all Git Repositories from one branch into another.
        :param from_branch: What branch to merge from
        :param to_branch: What branch to merge into
        :param working_path: local directory the repositories are cloned into
        :return: combined shell output for the last repository processed, or False on error
        """
        if not os.path.exists(working_path):
            # if path doesn't exist, create it:
            os.mkdir(working_path)

        os.chdir(working_path)

        for repo in self.config.repositories:
            os.chdir(working_path)
            out(1, blue("\n------- REPO: " + repo + " -------"))
            # see if the repo exists
            path = working_path + '/' + repo

            output = ''
            try:
                if not os.path.exists(path):
                    output += self.exec_shell('git clone ' + self.git_server +
                                              '/' + repo + '.git ' + path)

                    if 'Access denied.' in output:
                        out(2, yellow('skipped'))
                        continue

                os.chdir(path)

                output += self.exec_shell('git reset --hard HEAD')
                output += self.exec_shell(
                    'git checkout --force {}'.format(from_branch))
                output += self.exec_shell('git pull')
                output += self.exec_shell(
                    'git checkout --force {}'.format(to_branch))
                output += self.exec_shell('git pull')
                output += self.exec_shell('git merge {}'.format(from_branch))
                output += self.exec_shell(
                    'git push origin {}'.format(to_branch))

                for line in output.splitlines(True):
                    if line.startswith('error') or line.startswith('CONFLICT'):
                        out(2, red(line))
                    else:
                        out(2, green(line))

            except Exception as e:
                out(2, red('Error: '))
                out(2, red(output))
                out(2, red(e))
                return False
        return output
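
A minimal driver for the method above might look as follows; the 'release' instance name is an assumption for the sketch, while config.repositories and git_server are the attributes the example itself relies on.

# hypothetical call site -- 'release' is assumed to be an instance of the class that
# defines git_merge_all, with config.repositories and git_server already populated
result = release.git_merge_all('develop', 'master', working_path='/var/release')
if result is False:
    print('merge run aborted -- see the red output above for the failing repository')
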
Example No. 3
 def check_cmd_exists(self, cmd):
     "return the path of the specified command if it exists"
     print blue("Checking for %s..." % cmd),
     for path in os.environ['PATH'].split(os.path.pathsep):
         if os.path.exists(os.path.join(path, cmd)):
             print blue("'%s'" % os.path.join(path, cmd))
             return 1
     print red("not found.")
     return 0
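
On Python 3 the same PATH scan is usually delegated to shutil.which; a hedged equivalent of the check above (not part of the original setup script):

import shutil

def check_cmd_exists(cmd):
    "return the path of the specified command if it exists"
    path = shutil.which(cmd)                 # walks os.environ['PATH'] like the loop above
    print("Checking for %s... %s" % (cmd, path if path else "not found."))
    return path
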
Example No. 4
    def git_merge_all(self, from_branch, to_branch, working_path='/var/release'):
        """
        Merge all Git Repositories from one branch into another.
        :param from_branch: What branch to merge from
        :param to_branch: What branch to merge into
        :param working_path: local directory the repositories are cloned into
        :return: combined shell output for the last repository processed, or False on error
        """
        if not os.path.exists(working_path):
            # if path doesn't exist, create it:
            os.mkdir(working_path)

        os.chdir(working_path)

        for repo in self.config.repositories:
            os.chdir(working_path)
            out(1, blue("\n------- REPO: " + repo + " -------"))
            # see if the repo exists
            path = working_path+'/'+repo

            output = ''
            try:
                if not os.path.exists(path):
                    output += self.exec_shell('git clone '+self.git_server+'/'+repo+'.git ' + path)

                    if 'Access denied.' in output:
                        out(2, yellow('skipped'))
                        continue

                os.chdir(path)

                output += self.exec_shell('git reset --hard HEAD')
                output += self.exec_shell('git checkout --force {}'.format(from_branch))
                output += self.exec_shell('git pull')
                output += self.exec_shell('git checkout --force {}'.format(to_branch))
                output += self.exec_shell('git pull')
                output += self.exec_shell('git merge {}'.format(from_branch))
                output += self.exec_shell('git push origin {}'.format(to_branch))

                for line in output.splitlines(True):
                    if line.startswith('error') or line.startswith('CONFLICT'):
                        out(2, red(line))
                    else:
                        out(2, green(line))

            except Exception as e:
                out(2, red('Error: '))
                out(2, red(output))
                out(2, red(e))
                return False
        return output
Example No. 5
    def start_ssh(self, url):
        """
        start an ssh connection
        :param url:
        :return:
        """
        # use current user if none was passed in.
        if self.ssh_user is None:
            self.ssh_user = getpass.getuser()

        # if we haven't already started this connection, start it
        if url not in self.connections:
            try:
                # paramiko.util.log_to_file("paramiko.log")
                ssh = paramiko.SSHClient()
                ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
                ssh.allow_agent = False
                ssh.connect(url, username=self.ssh_user, password=self.pw)

            except Exception as e:
                out(0, red("SSH connection to {} failed: ".format(url)))
                print(e)
                return False
            # add this connection to the list of open connections
            self.connections[url] = ssh

        # set ssh_alias as the current connection key to be used in exec_shell
        self.current_conn_key = url

        return True
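
The connection stored by start_ssh is later looked up by its URL. The exec_shell helper is not shown in these examples, so the following is only an assumption built on the standard paramiko API, sketching how it could drive the stored client:

def exec_shell(self, command):
    """Run command on the host selected by the last start_ssh call (sketch)."""
    ssh = self.connections[self.current_conn_key]
    stdin, stdout, stderr = ssh.exec_command(command)
    # merge both streams so callers can grep the result for 'error' / 'CONFLICT'
    return stdout.read().decode() + stderr.read().decode()
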
Example No. 6
    def copy_and_swigify_headers(self, includedir, dirname, files):
        """Copy the header files to the local include directories. Add an
        #include line at the beginning for the SWIG interface files..."""

        for file in files:
            if not os.path.isfile(os.path.join(dirname, file)):
                continue

            if file[-2:] == ".i":
                file_i = os.path.join(dirname, file)
                file_h = os.path.join(dirname, file)[:-2] + ".h"

                if (not os.path.exists(file_h)
                        and os.path.exists(os.path.join(includedir, file_h))):
                    shutil.copyfile(os.path.join(includedir, file_h), file_h)
                    sys.stdout.write(' ' + turquoise(file_h))
                    fd = open(file_h, 'r+')
                    contents = fd.readlines()

                    ins_line_nr = -1
                    for line in contents:
                        ins_line_nr += 1
                        if line.find("#include ") != -1:
                            break

                    if ins_line_nr != -1:
                        contents.insert(ins_line_nr,
                                        self.pivy_header_include % (file_i))
                        fd.seek(0)
                        fd.writelines(contents)
                    else:
                        print blue("[") + red("failed") + blue("]")
                        sys.exit(1)
                    fd.close()
            # fixes for SWIG 1.3.21 and upwards
            # (mostly workarounding swig's preprocessor "function like macros"
            # preprocessor bug when no parameters are provided which then results
            # in no constructors being created in the wrapper)
            elif file[-4:] == ".fix":
                sys.stdout.write(' ' + red(os.path.join(dirname, file)[:-4]))
                shutil.copyfile(os.path.join(dirname, file),
                                os.path.join(dirname, file)[:-4])
            # had to introduce this because windows is a piece of crap
            elif sys.platform == "win32" and file[-6:] == ".win32":
                sys.stdout.write(' ' + red(os.path.join(dirname, file)[:-6]))
                shutil.copyfile(os.path.join(dirname, file),
                                os.path.join(dirname, file)[:-6])
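
The (includedir, dirname, files) signature matches the visitor callback of Python 2's os.path.walk(path, visit, arg), which calls visit(arg, dirname, names) for every directory it descends into. The call site is not part of this example, so the line below is only an illustration of how the method would plausibly be wired up:

# walk a SWIG interface tree and patch headers as they are found
# ("interfaces" is a placeholder path, not taken from the example)
os.path.walk("interfaces", self.copy_and_swigify_headers, includedir)
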
Example No. 7
	def printFlags(self):
		print output.white("\nFlags set:")
		keys = self.initflags.keys()
		keys.sort()
		for key in keys:
			if self.initflags[key]:
				print key + " "*(self.flag_width-len(str(key))) + output.green("On")
			else:
				print key + " "*(self.flag_width-len(str(key))) + output.red("Off")
Example No. 8
    def swig_generate(self):
        "build all available modules"

        for module in self.MODULES:
            module_name = self.MODULES[module][0]
            config_cmd = self.MODULES[module][1]
            module_pkg_name = self.MODULES[module][2]
            mod_out_prefix = module_pkg_name.replace('.', os.sep) + module

            if sys.platform == "win32":
                INCLUDE_DIR = os.getenv("COIN3DDIR") + "\\include"
                CPP_FLAGS = "-I" + INCLUDE_DIR +  " " + \
                            "-I" + os.getenv("COIN3DDIR") + "\\include\\Inventor\\annex" + \
                            " /DSOWIN_DLL /DCOIN_DLL /wd4244 /wd4049"
                LDFLAGS_LIBS = os.getenv("COIN3DDIR") + "\\lib\\coin2.lib " + \
                               os.getenv("COIN3DDIR") + "\\lib\\sowin1.lib"
            else:
                INCLUDE_DIR = self.do_os_popen("coin-config --includedir")
                CPP_FLAGS = self.do_os_popen("%s --cppflags" % config_cmd)
                LDFLAGS_LIBS = self.do_os_popen("%s --ldflags --libs" %
                                                config_cmd)

            self.CXX_INCS = self.CXX_INCS + ' ' + CPP_FLAGS  #G.Barrand
            if not os.path.isfile(mod_out_prefix + "_wrap.cpp"):
                print red("\n=== Generating %s_wrap.cpp for %s ===\n" %
                          (mod_out_prefix, module))
                print blue(
                    self.SWIG + " " + self.SWIG_SUPPRESS_WARNINGS + " " +
                    self.SWIG_PARAMS %
                    (INCLUDE_DIR, self.CXX_INCS, mod_out_prefix, module))
                if os.system(
                        self.SWIG + " " + self.SWIG_SUPPRESS_WARNINGS + " " +
                        self.SWIG_PARAMS %
                    (INCLUDE_DIR, self.CXX_INCS, mod_out_prefix, module)):
                    print red(
                        "SWIG did not generate wrappers successfully! ** Aborting **"
                    )
                    sys.exit(1)
            else:
                print red("=== %s_wrap.cpp for %s already exists! ===" %
                          (mod_out_prefix, module_pkg_name + module))

            self.ext_modules.append(
                Extension(
                    module_name, [mod_out_prefix + "_wrap.cpp"],
                    extra_compile_args=(self.CXX_INCS + CPP_FLAGS).split(),
                    extra_link_args=(self.CXX_LIBS + LDFLAGS_LIBS).split()))
            self.py_modules.append(module_pkg_name + module)
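
do_os_popen is used above to capture the output of coin-config and the per-module config commands; it is not shown in these examples, so the following is only a guess at its shape based on how its return value is used:

def do_os_popen(self, cmd):
    "Run cmd and return its stdout with surrounding whitespace stripped (assumed helper)."
    fd = os.popen(cmd)
    output = fd.read().strip()
    fd.close()
    return output
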
Example No. 9
				print "!!! Cannot create log... No write access / Does not exist"
				print "!!! PORT_LOGDIR:",mysettings["PORT_LOGDIR"]
				mysettings["PORT_LOGDIR"]=""

		# if any of these are being called, handle them -- running them out of the sandbox -- and stop now.
		if mydo in ["help","setup"]:
			return 0
#			return spawn(EBUILD_SH_BINARY+" "+mydo,mysettings,debug=debug,free=1,logfile=logfile)
		elif mydo in ["prerm","postrm","preinst","postinst","config"]:
			mysettings.load_infodir(pkg_dir)
			if not use_info_env:
				print "overloading port_env_file setting to %s" % mysettings["T"]+"/environment"
				mysettings["PORT_ENV_FILE"] = mysettings["T"] + "/environment"
				if not os.path.exists(mysettings["PORT_ENV_FILE"]):
					from output import red
					print red("!!!")+" err.. it doesn't exist.  that's bad."
					sys.exit(1)
			return 0
#			return spawn(EBUILD_SH_BINARY+" "+mydo,mysettings,debug=debug,free=1,logfile=logfile)
	
		try: 
			mysettings["SLOT"], mysettings["RESTRICT"] = db["/"]["porttree"].dbapi.aux_get(mycpv,["SLOT","RESTRICT"])
		except (IOError,KeyError):
			print red("doebuild():")+" aux_get() error reading "+mycpv+"; aborting."
			sys.exit(1)

		#initial dep checks complete; time to process main commands
	
		nosandbox=(("userpriv" in features) and ("usersandbox" not in features))
		actionmap={
				  "depend": {                 "args":(0,1)},         # sandbox  / portage
Example No. 10
Inter-Organisational Intrusion Detection System (IOIDS)

Check README in the IOIDS folder for more information.

@author: Michael Pilgermann
@contact: mailto:[email protected]
@license: GPL (General Public License)
"""

import output
from sys import stdout

oneindent = ' ' * 3
SUCESS_POS = output.green('  OK  ')
SUCESS_NEG = output.red('FAILED')
SUCESS_SKIP = output.yellow(' SKIP ')
SUCESS_WARN = output.yellow('  !!  ')

COLUMN_SUCESS = 80
COLUMN_INPUT = 70
LENGTH_LINE = 89


def printAction(indent, text, linebreak = 0, out = stdout):
    """
    Prints a line for an action and puts the cursor on a predefined column.
    
    Usually, no line break is written, the line should be finished after performing an
    action using the function L{finishActionLine}.
    """
Example No. 11
# Setup file for the Pivy distribution.
#
import os, shutil, sys

from distutils.command.build import build
from distutils.command.clean import clean
from distutils.command.install import install
from distutils.core import setup
from distutils.extension import Extension
from distutils import sysconfig

# if we are on a Gentoo box salute the chap and output stuff in nice colors
# Gentoo is Python friendly, so be especially friendly to them! ;)
try:
    from output import green, blue, turquoise, red, yellow
    print red("Oooh, it's a Gentoo! Nice nice! tuhtah salutes you! :)")
except ImportError:

    def red(text):
        return text

    def green(text):
        return text

    def blue(text):
        return text

    def turquoise(text):
        return text

    def yellow(text):
        return text
Example No. 12
 def check_python_version(self):
     "check the Python version"
     print blue("Python version...%s" % sys.version.split(" ")[0])
     if int(sys.version[0]) < 2:
         print red("Pivy only works with Python versions >= 2.0.")
         sys.exit(1)
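
On current interpreters the same guard is normally written against sys.version_info instead of slicing the version string; a small hedged equivalent (not part of the Pivy script):

import sys

def check_python_version():
    "check the Python version"
    print("Python version...%d.%d.%d" % sys.version_info[:3])
    if sys.version_info[0] < 2:
        print("Pivy only works with Python versions >= 2.0.")
        sys.exit(1)
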
Example No. 13
def svnerr(message):
		sys.stderr.write(output.red("   SVN: ") + message + "\n")
Example No. 14
def eerror(message):
	sys.stderr.write(output.red(" * ") + message + "\n")
Example No. 15
	def process_phase(self,phase,mysettings,myebuild,myroot,allstages=False,**keywords):
		"""the public 'doebuild' interface- all phases are called here, along w/ a valid config
		allstages is the equivalent of 'do merge, and all needed phases to get to it'
		**keywords is options passed on to __adjust_env.  It will be removed as __adjust_env is digested"""
		from portage import merge,unmerge,features

		validcommands = ["help","clean","prerm","postrm","preinst","postinst",
		                "config","setup","depend","fetch","digest",
		                "unpack","compile","test","install","rpm","qmerge","merge",
		                "package","unmerge", "manifest"]
	
		if phase not in validcommands:
			validcommands.sort()
			writemsg("!!! doebuild: '%s' is not one of the following valid commands:" % phase)
			for vcount in range(len(validcommands)):
				if vcount%6 == 0:
					writemsg("\n!!! ")
				writemsg(string.ljust(validcommands[vcount], 11))
			writemsg("\n")
			return 1

		retval=self.__adjust_env(phase,mysettings,myebuild,myroot,**keywords)
		if retval:
			return retval

		if "userpriv" in features:
			sandbox = ("usersandbox" in features)
		else:
			sandbox = ("sandbox" in features)

		droppriv=(("userpriv" in features) and \
			("nouserpriv" not in string.split(mysettings["PORTAGE_RESTRICT"])) and portage_exec.userpriv_capable)
		use_fakeroot=(("userpriv_fakeroot" in features) and droppriv and portage_exec.fakeroot_capable)

		# basically a nasty graph of 'w/ this phase, have it userprived/sandboxed/fakeroot', and run
		# these phases prior
		actionmap={
			  "depend": {                "sandbox":False,	"userpriv":True, "fakeroot":False},
			  "setup":  {                "sandbox":True,	"userpriv":False, "fakeroot":False},
			 "unpack":  {"dep":"setup",  "sandbox":sandbox,	"userpriv":True, "fakeroot":False},
			"compile":  {"dep":"unpack", "sandbox":sandbox,"userpriv":True, "fakeroot":False},
			   "test":  {"dep":"compile","sandbox":sandbox,"userpriv":True, "fakeroot":False},
			"install":  {"dep":"test",   "sandbox":(not use_fakeroot or (not use_fakeroot and sandbox)),
									"userpriv":use_fakeroot,"fakeroot":use_fakeroot},
			    "rpm":  {"dep":"install","sandbox":False,	"userpriv":use_fakeroot, "fakeroot":use_fakeroot},
	    		"package":  {"dep":"install", "sandbox":False,	"userpriv":use_fakeroot, "fakeroot":use_fakeroot},
			"merge"	 :  {"dep":"install", "sandbox":True,	"userpriv":False, "fakeroot":False}
		}

		merging=False
		# this shouldn't technically ever be called, get_keys exists for this.
		# left in for compatibility while portage.doebuild still exists
		if phase=="depend":
			return retval
		elif phase=="unmerge":
			return unmerge(mysettings["CATEGORY"],mysettings["PF"],myroot,mysettings)
		elif phase in ["fetch","digest","manifest","clean"]:
			return retval
		elif phase=="merge":
			merging=True
		elif phase=="qmerge":
			#no phases ran.
			phase="merge"
			merging=True
#			return merge(mysettings["CATEGORY"],mysettings["PF"],mysettings["D"],mysettings["PORTAGE_BUILDDIR"]+"/build-info",myroot,\
#				mysettings)

		elif phase in ["help","clean","prerm","postrm","preinst","postinst","config"]:
			self.__ebp = request_ebuild_processor(userpriv=False)
			self.__ebp.write("process_ebuild %s" % phase)
			self.__ebp.send_env(mysettings)
			self.__ebp.set_sandbox_state(phase in ["help","clean"])
			self.__ebp.write("start_processing")
			retval = self.__generic_phase([],mysettings)
			release_ebuild_processor(self.__ebp)
			self.__ebp = None
			return not retval

		k=phase
		# represent the phases to run, grouping each phase based upon if it's sandboxed, fakerooted, and userpriv'd
		# ugly at a glance, but remember a processor can run multiple phases now.
		# best to not be wasteful in terms of env saving/restoring, and just run all applicable phases in one shot
		phases=[[[phase]]]
		sandboxed=[[actionmap[phase]["sandbox"]]]
		privs=[(actionmap[phase]["userpriv"],actionmap[phase]["fakeroot"])]

		if allstages:
			while actionmap[k].has_key("dep"):
				k=actionmap[k]["dep"]
				if actionmap[k]["userpriv"] != privs[-1][0] or actionmap[k]["fakeroot"] != privs[-1][1]:
					phases.append([[k]])
					sandboxed.append([actionmap[k]["sandbox"]])
					privs.append((actionmap[k]["userpriv"],actionmap[k]["fakeroot"]))
				elif actionmap[k]["sandbox"] != sandboxed[-1][-1]:
					phases[-1].append([k])
					sandboxed[-1].extend([actionmap[k]["sandbox"]])
				else:
					phases[-1][-1].append(k)
			privs.reverse()
			phases.reverse()
			sandboxed.reverse()
			for x in phases:
				for y in x:
					y.reverse()
				x.reverse()
		# and now we have our phases grouped in parallel to the sandbox/userpriv/fakeroot state.

		all_phases = portage_util.flatten(phases)

#		print "all_phases=",all_phases
#		print "phases=",phases
#		print "sandbox=",sandboxed
#		print "privs=",privs
#		sys.exit(1)
#		print "\n\ndroppriv=",droppriv,"use_fakeroot=",use_fakeroot,"\n\n"

		#temporary hack until sandbox + fakeroot (if ever) play nice.
		while privs:
			if self.__ebp == None or (droppriv and self.__ebp.userprived() != privs[0][0]) or \
				(use_fakeroot and self.__ebp.fakerooted() != privs[0][1]):
				if self.__ebp != None:
					print "swapping processors for",phases[0][0]
					release_ebuild_processor(self.__ebp)
					self.__ebp = None
				opts={}

				#only engage fakeroot when userpriv'd
				if use_fakeroot and privs[0][1]:
					opts["save_file"] = mysettings["T"]+"/fakeroot_db"

				self.__ebp = request_ebuild_processor(userpriv=(privs[0][0] and droppriv), \
					fakeroot=(privs[0][1] and use_fakeroot), \
					sandbox=(not (privs[0][1] and use_fakeroot) and portage_exec.sandbox_capable),**opts)

			#loop through the instances where the processor must have the same sandboxed state-
			#note a sandbox'd process can have its sandbox disabled.
			#this separation is needed since you can't mix sandbox and fakeroot atm.
			for sandbox in sandboxed[0]:
				if "merge" in phases[0][0]:
					if len(phases[0][0]) == 1:
						print "skipping this phase, it's just merge"
						continue
					phases[0][0].remove("merge")

				self.__ebp.write("process_ebuild %s" % string.join(phases[0][0]," "))
				self.__ebp.send_env(mysettings)
				self.__ebp.set_sandbox_state(sandbox)
				self.__ebp.write("start_processing")
				phases[0].pop(0)
				retval = not self.__generic_phase([],mysettings)
				if retval:
					release_ebuild_processor(self.__ebp)
					self.__ebp = None
					return retval
			sandboxed.pop(0)
			privs.pop(0)
			phases.pop(0)
		# hey hey. we're done.  Now give it back.
		release_ebuild_processor(self.__ebp)
		self.__ebp = None

		# packaging moved out of ebuild.sh, and into this code.
		# makes it so ebuild.sh no longer must run as root for the package phase.
		if "package" in all_phases:
			print "processing package"
			#mv "${PF}.tbz2" "${PKGDIR}/All" 
			if not os.path.exists(mysettings["PKGDIR"]+"/All"):
				os.makedirs(mysettings["PKGDIR"]+"/All")
			if not os.path.exists(mysettings["PKGDIR"]+"/"+mysettings["CATEGORY"]):
				os.makedirs(mysettings["PKGDIR"]+"/"+mysettings["CATEGORY"])
			if os.path.exists("%s/All/%s.tbz2" % (mysettings["PKGDIR"],mysettings["PF"])):
				os.remove("%s/All/%s.tbz2" % (mysettings["PKGDIR"],mysettings["PF"]))
			retval = not portage_util.movefile("%s/%s.tbz2" % (mysettings["PORTAGE_BUILDDIR"],mysettings["PF"]),
				mysettings["PKGDIR"]+"/All/"+mysettings["PF"]+".tbz2") > 0
			if retval:	return False
			if os.path.exists("%s/%s/%s.tbz2" % (mysettings["PKGDIR"],mysettings["CATEGORY"],mysettings["PF"])):
				os.remove("%s/%s/%s.tbz2" % (mysettings["PKGDIR"],mysettings["CATEGORY"],mysettings["PF"]))
			os.symlink("%s/All/%s.tbz2" % (mysettings["PKGDIR"],mysettings["PF"]),
				"%s/%s/%s.tbz2" % (mysettings["PKGDIR"],mysettings["CATEGORY"],mysettings["PF"]))

		#same as the package phase above, removes the root requirement for the rpm phase.
		if "rpm" in all_phases:
			rpm_name="%s-%s-%s" % (mysettings["PN"],mysettings["PV"],mysettings["PR"])

			retval = not portage_util.movefile("%s/%s.tar.gz" % (mysettings["T"],mysettings["PF"]),
				"/usr/src/redhat/SOURCES/%s.tar.gz" % mysettings["PF"]) > 0
			if retval:
				print "moving src for rpm failed, retval=",retval
				return False

			retval=portage_exec.spawn(("rpmbuild","-bb","%s/%s.spec" % \
				(mysettings["PORTAGE_BUILDDIR"],mysettings["PF"])))
			if retval:
				print "Failed to integrate rpm spec file"
				return retval

			if not os.path.exists(mysettings["RPMDIR"]+"/"+mysettings["CATEGORY"]):
				os.makedirs(mysettings["RPMDIR"]+"/"+mysettings["CATEGORY"])

			retval = not portage_util.movefile("/usr/src/redhat/RPMS/i386/%s.i386.rpm" % rpm_name,
				"%s/%s/%s.rpm" % (mysettings["RPMDIR"],mysettings["CATEGORY"],rpm_name)) > 0
			if retval:
				print "rpm failed"
				return retval


		# not great check, but it works.
		# basically, if FEATURES="-buildpkg" emerge package was called, the files in the current 
		# image directory don't have their actual perms.  so we use an ugly bit of bash
		# to make the fakeroot (claimed) permissions/owners a reality.
		if use_fakeroot and os.path.exists(mysettings["T"]+"/fakeroot_db") and merging:
			print "correcting fakeroot privs"
			retval=portage_exec.spawn(("/usr/lib/portage/bin/affect-fakeroot-perms.sh", \
				mysettings["T"]+"/fakeroot_db", \
				mysettings["D"]),env={"BASHRC":portage_const.INVALID_ENV_FILE})
			if retval or retval == None:
				print red("!!!")+"affecting fakeroot perms after the fact failed"
				return retval

		if merging:
			print "processing merge"
			retval = merge(mysettings["CATEGORY"],mysettings["PF"],mysettings["D"],mysettings["PORTAGE_BUILDDIR"]+"/build-info",myroot,\
				mysettings,myebuild=mysettings["EBUILD"])
		return retval
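
The grouping that the comments inside the method describe is easiest to see on a concrete trace. Assuming phase='install', allstages=True, sandbox=True and use_fakeroot=False (values picked only for this illustration), walking the 'dep' chain and then reversing leaves the three parallel structures roughly like this:

# illustrative shapes only, reconstructed from the grouping code above
phases    = [[['setup']], [['unpack', 'compile', 'test']], [['install']]]
sandboxed = [[True], [True], [True]]
privs     = [(False, False), (True, False), (False, False)]   # (userpriv, fakeroot) per batch

Each outer entry corresponds to one batch handed to an ebuild processor; the inner lists group the phases that can share a single sandbox state.
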
Example No. 16
def logfe(s):
	m = "Fatal error: " + output.red(s)
	p(m)
Example No. 17
	def printStatus(self, success):
		if (success):	
			print " "*70 + output.white("[") + output.green("done") + output.white("]")
		else:
			print " "*70 + output.white("[") + output.red("failed") + output.white("]")
Example No. 18
from output import green, red

ostype = os.uname()[0]

lchown = None
if ostype == "Linux":
    userland = "GNU"
    os.environ["XARGS"] = "xargs -r"
elif ostype in ["Darwin", "FreeBSD", "OpenBSD"]:
    if ostype == "Darwin":
        lchown = os.chown
    userland = "BSD"
    os.environ["XARGS"] = "xargs"
else:
    writemsg(
        red("Operating system") + " \"" + ostype + "\" " +
        red("currently unsupported. Exiting.") + "\n")
    sys.exit(1)

if not lchown:
    if "lchown" in dir(os):
        # Included in python-2.3
        lchown = os.lchown
    else:
        import missingos
        lchown = missingos.lchown

os.environ["USERLAND"] = userland

#Secpass will be set to 1 if the user is root or in the portage group.
secpass = 0
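
The trailing comment anticipates the secpass check that follows in the original module; a simplified sketch of the kind of test it describes (not the exact portage logic; os is already imported in the fragment above):

import grp

if os.getuid() == 0:
    secpass = 1                          # root always qualifies
else:
    try:
        # membership in the portage group also grants elevated access
        portage_gid = grp.getgrnam("portage").gr_gid
        if portage_gid in os.getgroups():
            secpass = 1
    except KeyError:
        pass                             # no portage group on this system
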
Example No. 19
	def load_confcache(self,transfer_to,confcache=portage_const.CONFCACHE_FILE,
		confcache_list=portage_const.CONFCACHE_LIST):
		"""verifys a requested conf cache, removing the global cache if it's stale.
		The handler should be the only one to call this"""
		from portage_checksum import perform_md5
		from output import red
		if not self.__ebp:
			import traceback
			traceback.print_stack()
			print "err... no ebp, yet load_confcache called. invalid"
			raise Exception,"load_confcache called yet no running processor.  bug?"

		valid=True
		lock=None
		if not os.path.exists(confcache_list):
			print "confcache file listing doesn't exist"
			valid=False
		elif not os.path.exists(confcache):
			print "confcache doesn't exist"
			valid=False
		else:
			lock=portage_locks.lockfile(confcache_list,wantnewlockfile=1)
			try:
				myf=anydbm.open(confcache_list, "r", 0664)
				for l in myf.keys():
					# file, md5
					if perform_md5(l,calc_prelink=1) != myf[l]:
						print red("***")+" confcache is stale: %s: recorded md5: %s: actual: %s:" % (l,myf[l],perform_md5(l,calc_prelink=1))
						raise Exception("md5 didn't match")
				myf.close()
				# verify env now.
				new_cache=[]
				env_vars=[]
				
				# guessing on THOST.  I'm sure it's wrong...

				env_translate={"build_alias":"CBUILD","host_alias":"CHOST","target_alias":"THOST"}
				cache=portage_util.grabfile(confcache)

				x=0
				while x < len(cache):
					#ac_cv_env
					if cache[x][0:10] == "ac_cv_env_":
						f=cache[x][10:].find("_set")
						if f == -1 or f==11:
							cache.pop(x)
							continue
						env_vars.append(cache[x][10:10 + cache[x][10:].find("_set")])
						x += 1
					else:
						new_cache.append(cache[x])
					x += 1

				for x in env_vars:
					self.__ebp.write("request %s" % env_translate.get(x,x))
					line=self.__ebp.read()
					if line[-1] == "\n":
						line=line[:-1]
					new_cache.append("ac_cv_env_%s_set=%s" % (x, line))
					if line == "unset":
						new_cache.append("ac_cv_env_%s_value=" % x)
					else:
						line=self.__ebp.read()
						if line[-1] == "\n":
							line=line[:-1]
						if line.split()[0] != line:
							#quoting... XXX
							new_cache.append("ac_cv_env_%s_value='%s'" % (x,line))
						else:
							new_cache.append("ac_cv_env_%s_value=%s" % (x,line))

				myf=open(confcache,"w")
				for x in new_cache:
					myf.write(x+"\n")
				myf.close()
						
			except SystemExit, e:
				raise
			except Exception,e:
				print "caught exception: %s" % str(e)
				try:	myf.close()
				except (IOError, OSError):	pass
				valid=False