Example #1
File: web.py  Project: geekbuntu/Quarters
    def get_jobs( self ):
        ''' returns a list of new jobdescriptions '''

        ret = []

        # make sure we can exit safely if the web ui is down
        try:
            json_data = get_url( 'http://localhost:8080/stat' )
        except:
            return ret

        temp_json = bytes.decode( json_data )
        print( 'temp_json is:', temp_json )
        print( 'json_data is:', json_data )
        remote_pkgs = json.loads( temp_json )

        makepkg_cmd = [ 'makepkg', '--source', '--skipinteg' ]
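        # makepkg --source builds only the source tarball (.src.tar.gz); --skipinteg skips integrity checks on the sources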
        for rpkg in remote_pkgs:
            # copy over the sources to a temp directory
            #orig_dir = os.path.join( '/var/abs/core', rpkg[ 'pkgname' ] )
            dest_dir = os.path.join( '/tmp', rpkg[ 'uuid' ] )
            # check if we already did this
            if os.path.exists( dest_dir ):
                continue
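            # check out the package's trunk from the Arch Linux packages SVN into /tmp/<uuid>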
            svnco_cmd = [ 'svn', 'checkout', 'svn://svn.archlinux.org/packages/' + rpkg[ 'pkgname' ] + '/trunk', rpkg[ 'uuid' ] ]
            proc = subprocess.Popen( svnco_cmd, cwd='/tmp' )
            proc.wait()
            #shutil.copytree( orig_dir, dest_dir )
            pkgbuild_path = os.path.join( '/tmp', rpkg[ 'uuid' ], 'PKGBUILD' )
            pkgbuild_data = pacman.load( pkgbuild_path )

            # build the .src.tar.gz file
            proc = subprocess.Popen( makepkg_cmd, cwd=dest_dir )
            proc.wait()

            # find the resulting .src.tar.gz file
            getsrc = glob.glob( os.path.join( dest_dir, '*.src.tar.gz' ) )
            print( 'glob returned ' + str( getsrc ) )
            if len( getsrc ) != 1:
                print( 'error, not enough, or too many srcpkgs detected in Web' )
                # TODO need to create a build log with a message from the scm telling what happened
                self.local_state.create_empty_job( rpkg[ 'uuid' ], 'failed' )
                continue

            # get the sha256sum of the file
            sha256sum = sha256sum_file( getsrc[0] )

            # move the srcpkg to the final resting place
            srcpkg_path = os.path.join( self.master_root, rpkg[ 'uuid' ] )
            os.makedirs( srcpkg_path, exist_ok=True )
            srcpkg_path = os.path.join( srcpkg_path, rpkg[ 'uuid' ] + '.src.tar.gz' )
            shutil.move( getsrc[0], srcpkg_path )

            # add the final jobdescription to the list
            jd = JobDescription( rpkg[ 'uuid' ], rpkg[ 'pkgname' ], sha256sum, pkgbuild_data[ 'arch' ] )
            ret.append( jd )

        return ret
Example #2
# Go through each package, get the info, and apply the rules
for package in packages:
	extracted = 0
	if not os.access(package, os.R_OK):
		print "Error: Problem reading " + package
		usage()

	if package[-7:] == '.tar.gz':
		pkgtar = verify_package(package)

		if not pkgtar:
			print "Error: " + package + " is not a package"
			if len(packages) > 1:
				continue

		pkginfo = pacman.load(package)

		# No rules selected?  Then select them all!
		if active_modules == []:
			active_modules = modules

		# Loop through each one, load them and apply if possible
		for i in active_modules:
			cur_class = __import__('Namcap.' + i, globals(), locals(), [Namcap])
			pkg = cur_class.package()
			ret = [[],[],[]]
			if pkg.type() == "tarball":
				if pkg.prereq() == "extract":
					# If it's not extracted, then extract it and then analyze the package
					if not extracted:
						os.mkdir(sandbox_directory)
Example #3
def load(name, path=None):
    if not pkgcache.has_key(name):
        pkgcache[name] = pacman.load(name)
    return pkgcache[name]
Example #4
    def analyze(self, pkginfo, data):
        liblist = [{}, {}]
        dependlist = {}
        smartdepend = {}
        smartprovides = {}
        covereddepend = {}
        pkgcovered = {}
        ret = [[], [], []]
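        # ret collects three message lists: errors, warnings and informational notes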
        filllibcache()
        os.environ['LC_ALL'] = 'C'
        os.path.walk(data, scanlibs, liblist)
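        # scanlibs fills liblist[0] with shared-library dependencies and liblist[1] with script dependencies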

        # Ldd all the files and find all the link and script dependencies
        dependlist, tmpret = finddepends(liblist[0])

        # Handle "no package associated" errors
        for i in tmpret:
            ret[1].append(i)

        # Do the script handling stuff
        for i, v in liblist[1].iteritems():
            if not dependlist.has_key(i):
                dependlist[i] = {}
            for j in v.keys():
                dependlist[i][j] = 1
            files = [x[len(data) + 1:] for x in v.keys()]
            ret[2].append('Script link detected (' + i + ') in file ' +
                          str(files))

        # Remove the package name from that list, we can't depend on ourselves.
        if dependlist.has_key(pkginfo.name):
            del dependlist[pkginfo.name]

        # Do the info stuff
        for i, v in dependlist.iteritems():
            if type(v) == dict:
                files = [x[len(data) + 1:] for x in v.keys()]
                ret[2].append('File ' + str(files) +
                              ' link-level dependence on ' + i)

        # Check for packages in testing
        if os.path.isdir('/var/lib/pacman/sync/testing'):
            for i in dependlist.keys():
                p = pacman.load(i, '/var/lib/pacman/sync/testing/')
                q = load(i)
                if p != None and q != None and p.version == q.version:
                    ret[1].append('Dependency ' + i +
                                  ' on your system is a testing release')

        # Find all the covered dependencies from the PKGBUILD
        pkgdepend = {}
        if hasattr(pkginfo, 'depends'):
            for i in pkginfo.depends:
                pkgdepend[i] = 1

        # Include the optdepends from the PKGBUILD
        if hasattr(pkginfo, 'optdepends'):
            for i in pkginfo.optdepends:
                pkgdepend[i] = 1

        getcovered(None, pkgdepend, pkgcovered)

        # Do tree walking to find all the non-leaves (branches?)
        getcovered(None, dependlist, covereddepend)
        for i in covereddepend.keys():
            ret[2].append(
                'Dependency covered by dependences from link dependence (' +
                i + ')')
        # Set difference them to find the leaves
        for i in dependlist.keys():
            if not i in covereddepend.keys():
                smartdepend[i] = 1

        # Get the provides so we can reference them later
        getprovides(dependlist, smartprovides)

        # Do the actual message outputting stuff
        for i in smartdepend.keys():
            # If (i is not in the PKGBUILD's dependencies
            # and i isn't the package name
            # and ((there are provides for i
            # and those provides aren't included in the package's dependencies)
            # or there are no provides for i))
            all_dependencies = getattr(pkginfo, 'depends', []) + getattr(
                pkginfo, 'optdepends', [])
            if (i not in all_dependencies and i != pkginfo.name
                    and ((smartprovides.has_key(i) and len([
                        c for c in smartprovides[i] if c in pkgcovered.keys()
                    ]) == 0) or not smartprovides.has_key(i))):
                if type(dependlist[i]) == dict:
                    ret[0].append(
                        'Dependency detected and not included (' + i +
                        ') from files ' +
                        str([x[len(data) + 1:] for x in dependlist[i].keys()]))
                else:
                    ret[0].append('Dependency detected and not included (' +
                                  i + ')')
        if hasattr(pkginfo, 'depends'):
            for i in pkginfo.depends:
                if covereddepend.has_key(i) and dependlist.has_key(i):
                    ret[1].append(
                        'Dependency included but already satisfied (' + i +
                        ')')
                # if i is not in the depends as we see them and it's not in any of the provides from said depends
                elif not smartdepend.has_key(i) and i not in [
                        y for x in smartprovides.values() for y in x
                ]:
                    ret[1].append('Dependency included and not needed (' + i +
                                  ')')
        ret[2].append('Depends as namcap sees them: depends=(' +
                      ' '.join(smartdepend.keys()) + ')')
        return ret
Example #5
File: depends.py  Project: abhidg/namcap
def load(name, path=None):
	if not pkgcache.has_key(name):
		pkgcache[name] = pacman.load(name)
	return pkgcache[name]
Example #6
File: depends.py  Project: abhidg/namcap
	def analyze(self, pkginfo, data):
		liblist = [{},{}]
		dependlist = {}
		smartdepend = {}
		smartprovides = {}
		covereddepend = {}
		pkgcovered = {}
		ret = [[],[],[]]
		filllibcache()
		os.environ['LC_ALL'] = 'C'
		os.path.walk(data, scanlibs, liblist)

		# Ldd all the files and find all the link and script dependencies
		dependlist, tmpret = finddepends(liblist[0])

		# Handle "no package associated" errors
		for i in tmpret:
			ret[1].append(i)

		# Do the script handling stuff
		for i, v in liblist[1].iteritems():
			if not dependlist.has_key(i):
				dependlist[i] = {}
			for j in v.keys():
				dependlist[i][j] = 1
			files = [x[len(data)+1:] for x in v.keys()]
			ret[2].append(("script-link-detected %s in %s", (i, str(files))))

		# Remove the package name from that list, we can't depend on ourselves.
		if dependlist.has_key(pkginfo.name):
			del dependlist[pkginfo.name]

		# Do the info stuff
		for i, v in dependlist.iteritems():
			if type(v) == dict:
				files = [x[len(data)+1:] for x in v.keys()]
				ret[2].append(("link-level-dependence %s on %s", (str(files), i)))

		# Check for packages in testing
		if os.path.isdir('/var/lib/pacman/sync/testing'):
			for i in dependlist.keys():
				p = pacman.load(i, '/var/lib/pacman/sync/testing/')
				q = load(i)
				if p != None and q != None and p.version == q.version:
					ret[1].append(("dependency-is-testing-release %s", i))

		# Find all the covered dependencies from the PKGBUILD
		pkgdepend = {}
		if hasattr(pkginfo, 'depends'):
			for i in pkginfo.depends:
				pkgdepend[i] = 1

		# Include the optdepends from the PKGBUILD
		if hasattr(pkginfo, 'optdepends'):
			for i in pkginfo.optdepends:
				pkgdepend[i] = 1

		getcovered(None, pkgdepend, pkgcovered)

		# Do tree walking to find all the non-leaves (branches?)
		getcovered(None, dependlist, covereddepend)
		for i in covereddepend.keys():
			ret[2].append(("dependency-covered-by-link-dependence %s", i))

		# Set difference them to find the leaves
		for i in dependlist.keys():
			if not i in covereddepend.keys():
				smartdepend[i] = 1

		# Get the provides so we can reference them later
		getprovides(dependlist, smartprovides)

		# Do the actual message outputting stuff
		for i in smartdepend.keys():
			# If (i is not in the PKGBUILD's dependencies
			# and i isn't the package name
			# and ((there are provides for i
			# and those provides aren't included in the package's dependencies)
			# or there are no provides for i))
			all_dependencies = getattr(pkginfo, 'depends', []) + getattr(pkginfo, 'optdepends', [])
			if (i not in all_dependencies and i != pkginfo.name and ((smartprovides.has_key(i) and len([c for c in smartprovides[i] if c in pkgcovered.keys()]) == 0) or not smartprovides.has_key(i))):
					if type(dependlist[i]) == dict:
						ret[0].append(("dependency-detected-not-included %s from files %s", (i, str([x[len(data)+1:] for x in dependlist[i].keys()])) ))
					else:
						ret[0].append(("dependency-detected-not-included %s", i))
		if hasattr(pkginfo, 'depends'):
			for i in pkginfo.depends:
				if covereddepend.has_key(i) and dependlist.has_key(i):
					ret[1].append(("dependency-already-satisfied %s", i))
				# if i is not in the depends as we see them and it's not in any of the provides from said depends
				elif not smartdepend.has_key(i) and i not in [y for x in smartprovides.values() for y in x]:
					ret[1].append(("dependency-not-needed %s", i))
		ret[2].append(("depends-by-namcap-sight depends=(%s)", ' '.join(smartdepend.keys()) ))
		return ret
Example #7
# Go through each package, get the info, and apply the rules
for package in packages:
	extracted = 0
	if not os.access(package, os.R_OK):
		print "Error: Problem reading " + package
		usage()

	if package[-7:] == '.tar.gz':
		pkgtar = verify_package(package)

		if not pkgtar:
			print "Error: " + package + " is not a package"
			if len(packages) > 1:
				continue

		pkginfo = pacman.load(package)

		# No rules selected?  Then select them all!
		if active_modules == []:
			active_modules = modules

		# Loop through each one, load them and apply if possible
		for i in active_modules:
			cur_class = __import__('Namcap.' + i, globals(), locals(), [Namcap])
			pkg = cur_class.package()
			ret = [[],[],[]]
			if pkg.type() == "tarball":
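				# tar -tvf gives a verbose listing of the archive contents without extracting it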
				tar_tvf = []
				get_output = subprocess.Popen("tar -tvf " + package, shell=True, stdout = subprocess.PIPE, stderr = subprocess.PIPE).communicate()
				split_output = get_output[0].split('\n')
Example #8
	def analyze(self, pkginfo, data, tar_tvf):
		liblist = [{},{}]
		dependlist = {}
		smartdepend = {}
		smartprovides = {}
		covereddepend = {}
		pkgcovered = {}
		ret = [[],[],[]]
		extract_required(tar_tvf)
		filllibcache()
		os.environ['LC_ALL'] = 'C'
		os.path.walk(data, scanlibs, liblist)

		# Ldd all the files and find all the link and script dependencies
		dependlist, tmpret = finddepends(liblist[0])  # it is liblist[0] because [0], the first dict, contains shared libs, and the second contains script dependencies

		# Handle "no package associated" errors
		for i in tmpret:
			ret[1].append(i)

		# Do the script handling stuff
		for i, v in liblist[1].iteritems():
			if not dependlist.has_key(i):
				dependlist[i] = {}
			for j in v.keys():
				dependlist[i][j] = 1
			files = [x[len(data)+1:] for x in v.keys()]
			ret[2].append('Script link detected (' + i + ') in file ' + str(files))

		# Remove the package name from that list, we can't depend on ourselves.
		if dependlist.has_key(pkginfo.name):
			del dependlist[pkginfo.name]

		# Do the info stuff
		# THIS IS NEXT
		for i, v in dependlist.iteritems():
			if type(v) == dict:
				files = [x[len(data)+1:] for x in v.keys()]
				ret[2].append('File '+ str(files) +' link-level dependence on ' + i)

		# Check for packages in testing
		if os.path.isdir('/var/lib/pacman/sync/testing'):
			for i in dependlist.keys():
				p = pacman.load(i, '/var/lib/pacman/sync/testing/')
				q = load(i)
				if p != None and q != None and p.version == q.version:
					ret[1].append('Dependency ' + i + ' on your system is a testing release')

		# Find all the covered dependencies from the PKGBUILD
		pkgdepend = {}
		if hasattr(pkginfo, 'depends'):
			for i in pkginfo.depends:
				pkgdepend[i] = 1

		# Include the optdepends from the PKGBUILD
		if hasattr(pkginfo, 'optdepends'):
			for i in pkginfo.optdepends:
				pkgdepend[i] = 1

		getcovered(None, pkgdepend, pkgcovered)

		# Do tree walking to find all the non-leaves (branches?)
		#find all dependencies of dependencies of the shared libraries found from the actual files
		getcovered(None, dependlist, covereddepend)
		for i in covereddepend.keys():
			ret[2].append('Dependency covered by dependences from link dependence (' + i + ')')
		# Set difference them to find the leaves
		for i in dependlist.keys():
			if not i in covereddepend.keys():
				smartdepend[i] = 1

		# Get the provides so we can reference them later
		getprovides(dependlist, smartprovides)

		# Do the actual message outputting stuff
		for i in smartdepend.keys():
			# If (i is not in the PKGBUILD's dependencies
			# and i isn't the package name
			# and ((there are provides for i
			# and those provides aren't included in the package's dependencies)
			# or there are no provides for i))
			all_dependencies = getattr(pkginfo, 'depends', []) + getattr(pkginfo, 'optdepends', [])
			if (i not in all_dependencies and i != pkginfo.name and ((smartprovides.has_key(i) and len([c for c in smartprovides[i] if c in pkgcovered.keys()]) == 0) or not smartprovides.has_key(i))):
					if type(dependlist[i]) == dict:
						ret[0].append('Dependency detected and not included ('+i+') from files '+str([x[len(data)+1:] for x in dependlist[i].keys()]))
					else:
						ret[0].append('Dependency detected and not included ('+i+')')
		if hasattr(pkginfo, 'depends'):
			for i in pkginfo.depends:
				if covereddepend.has_key(i) and dependlist.has_key(i):
					ret[1].append('Dependency included but already satisfied ('+i+')')
				# if i is not in the depends as we see them and it's not in any of the provides from said depends
				elif not smartdepend.has_key(i) and i not in [y for x in smartprovides.values() for y in x]:
					ret[1].append('Dependency included and not needed ('+i+')')
		ret[2].append('Depends as namcap sees them: depends=('+ ' '.join(smartdepend.keys())+')')
		return ret