def resolve(self, filters, package):
    """Resolve a package name to a concrete trove and emit it.

    @filters (list) list of PackageKit filter strings
    @package (list) package names; only the first entry is resolved
    """
    log.info("======== resolve =========")
    log.info("filters: %s package:%s " % (filters, package))
    cache = Cache()
    pkg_dict = cache.resolve(package[0])
    if pkg_dict is None:
        # NOTE(review): assumes self.error() does not return; if it does,
        # the code below would run with pkg_dict == None -- confirm.
        self.error(ERROR_INTERNAL_ERROR, "Package Not found on repository")
    # BUGFIX(idiom): renamed from "filter", which shadowed the builtin.
    pkg_filter = ConaryFilter(filters)
    installed = pkg_filter._pkg_is_installed(pkg_dict["name"])
    conary_cli = ConaryPk()
    troveTuple = conary_cli.request_query(package[0])
    log.info(">>> %s" % troveTuple)
    if installed:
        pkg_filter.add_installed(troveTuple)
    else:
        pkg_filter.add_available(troveTuple)
    package_list = pkg_filter.post_process()
    log.info("package_list %s" % package_list)
    self._show_package_list(package_list)
def check_installed(self, troveTuple):
    """Return INFO_INSTALLED when the trove is in the local database,
    INFO_AVAILABLE otherwise."""
    log.debug("============check installed =========")
    cli = ConaryPk()
    if cli.query(troveTuple[0]):
        return INFO_INSTALLED
    return INFO_AVAILABLE
def get_package_from_id(self, package_id):
    """Return the trove(s) matching the name encoded in *package_id*.

    package_id(string) = "dpaster;0.1-3-1;x86;/foresight.rpath.org@fl:2-qa/0.1-3-1#{'version': '0.1-3-1', 'category': [], 'name': 'dpaster', 'label': 'foresight.rpath.org@fl:2-qa'}"
    """
    import ast  # local import: only needed here
    log.info("=========== get package from package_id ======================")
    name, verString, archString, data = pkpackage.get_package_from_id(package_id)
    summary = data.split("#")
    repo = summary[0]
    # SECURITY FIX: the metadata segment comes from an externally supplied
    # package id; eval() would execute arbitrary code, literal_eval only
    # parses Python literals (and raises on anything else, as before).
    metadata = ast.literal_eval(summary[1])
    cli = ConaryPk()
    return cli.request_query(name)
def _pkg_is_installed(self, pkg):
    '''Return True when *pkg* is present in the local conary database.'''
    return bool(ConaryPk().query(pkg))
def _fetchXML(self):
    """Download the XML metadata file for every configured label into
    self.xml_path.

    On a download failure, ERROR_NO_CACHE is reported through a backend
    instance (assumed not to return -- TODO confirm).
    """
    con = ConaryPk()
    for label in con.get_labels_from_config():
        xml_name = label + '.xml'
        filename = self.xml_path + xml_name
        wwwfile = self.server + xml_name
        try:
            wget = url.urlopen(wwwfile)
        except Exception:
            # BUGFIX: narrowed from a bare "except:" so SystemExit /
            # KeyboardInterrupt are no longer swallowed.
            Pk = PackageKitBaseBackend("")
            Pk.error(ERROR_NO_CACHE, " %s can not open" % wwwfile)
        # BUGFIX: close the file even if the write fails.
        openfile = open(filename, 'w')
        try:
            openfile.writelines(wget.readlines())
        finally:
            openfile.close()
def update(self, package_ids):
    '''
    Implement the {backend}-update functionality
    '''
    self.allow_cancel(True)
    self.percentage(0)
    self.status(STATUS_RUNNING)
    for pkg_id in package_ids.split(" "):
        name, version, flavor, installed = self._findPackage(pkg_id)
        # guard clause: unknown package -> report and move on
        if not name:
            self.error(ERROR_PACKAGE_ALREADY_INSTALLED, 'No available updates')
            continue
        ConaryPk().update(name)
def __init__(self):
    """Prepare cache directories and one XMLRepo per configured label."""
    self.conarypk = ConaryPk()
    # BUGFIX: materialize as a list -- the original generator expression
    # was fully consumed by the loop below, so any later iteration over
    # self.labels (e.g. a refresh) silently saw nothing.
    self.labels = list(self.conarypk.get_labels_from_config())
    self.pk = PackageKitBaseBackend("")
    if not os.path.isdir(self.dbPath):
        os.makedirs(self.dbPath)
    if not os.path.isdir(self.jobPath):
        os.mkdir(self.jobPath)
    if not os.path.isdir(self.xml_path):
        os.makedirs(self.xml_path)
    # NOTE(review): self.repos appears to be a class-level list shared
    # across instances -- repeated instantiation would append duplicates;
    # confirm and make it per-instance if so.
    for label in self.labels:
        if not os.path.exists(self.xml_path + label + ".xml"):
            self._fetchXML(label)
        self.repos.append(XMLRepo(label + ".xml", self.xml_path, self.pk))
def __init__(self):
    """Prepare cache directories, fetch missing XML data once, and build
    one XMLRepo per configured label."""
    con = ConaryPk()
    labels = con.get_labels_from_config()
    if not os.path.isdir(self.dbPath):
        os.makedirs(self.dbPath)
    if not os.path.isdir(self.jobPath):
        os.mkdir(self.jobPath)
    if not os.path.isdir(self.xml_path):
        os.makedirs(self.xml_path)
    # BUGFIX: _fetchXML() downloads the XML for EVERY label, so one call
    # suffices; the original re-downloaded everything once per missing file.
    if any(not os.path.exists(self.xml_path + l + ".xml") for l in labels):
        self._fetchXML()
    for xml_file in labels:
        self.repos.append(XMLRepo(xml_file + ".xml", self.xml_path))
def __init__(self, args):
    """Set up the conary client, its update callback, and the XML cache."""
    PackageKitBaseBackend.__init__(self, args)
    # conary configuration and client handles
    conary = ConaryPk()
    self.conary = conary
    self.cfg = conary.cfg
    self.client = conary.cli
    self.callback = UpdateCallback(self, self.cfg)
    self.client.setUpdateCallback(self.callback)
    self.xmlcache = XMLCache()
def __init__(self):
    """Prepare cache directories and one XMLRepo per configured label."""
    self.conarypk = ConaryPk()
    # BUGFIX: a generator expression here is exhausted by the loop below,
    # leaving self.labels empty for any later use (e.g. refresh()); store
    # a real list instead.
    self.labels = list(self.conarypk.get_labels_from_config())
    self.pk = PackageKitBaseBackend("")
    for directory in (self.dbPath, self.xml_path):
        if not os.path.isdir(directory):
            os.makedirs(directory)
    if not os.path.isdir(self.jobPath):
        os.mkdir(self.jobPath)
    # NOTE(review): self.repos looks like a shared class-level list --
    # verify and make it per-instance to avoid duplicates across instances.
    for label in self.labels:
        if not os.path.exists(self.xml_path + label + ".xml"):
            self._fetchXML(label)
        self.repos.append(XMLRepo(label + ".xml", self.xml_path, self.pk))
def init(label, fileoutput):
    """Fetch all packages on *label* and write their XML description to
    *fileoutput*. Failures are logged, not raised."""
    conarypk = ConaryPk()
    cli = conarypk.cli
    cfg = conarypk.cfg
    log.info("Attempting to retrieve repository data for %s" % label)
    try:
        pkgs = getPackagesFromLabel(cfg, cli, label)
        troves = conarypk.repos.getTroves(pkgs, withFiles=False)
        nodes = generate_xml(troves, label)
        cElementTree.ElementTree(nodes).write(fileoutput)
        log.info("Successfully wrote XML data for label %s into file %s" % (label, fileoutput))
    except Exception as e:
        # BUGFIX: the bare "except:" hid the actual failure entirely;
        # narrow the catch and include the exception in the log.
        log.error("Failed to gather data from the repository: %s" % e)
class XMLCache:
    """On-disk cache of per-label package metadata stored as XML files.

    One XMLRepo is created per configured conary label; queries fan out
    over all repos and duplicate package names are collapsed by list_set().
    """

    # Let's only get XML data from things that we support.
    # XXX We really should replace this with the Conary
    # RESTful API real soon now.
    pregenerated_XML_labels = (
        'conary.rpath.com@rpl:2-qa',
        'foresight.rpath.org@fl:2',
        'foresight.rpath.org@fl:2-qa',
        'foresight.rpath.org@fl:2-devel',
        'foresight.rpath.org@fl:2-kernel',
        'foresight.rpath.org@fl:2-qa-kernel',
        'foresight.rpath.org@fl:2-devel-kernel',
    )

    server = "http://packages.foresightlinux.org/cache/"
    # kept for backward compatibility; instances now use a per-instance list
    repos = []
    dbPath = '/var/cache/conary/'
    jobPath = dbPath + 'jobs'
    xml_path = dbPath + "xmlrepo/"

    def __init__(self):
        self.conarypk = ConaryPk()
        # BUGFIX: materialized as a list -- the original generator was
        # exhausted by the loop below, so refresh() iterated over nothing.
        self.labels = list(self.conarypk.get_labels_from_config())
        self.pk = PackageKitBaseBackend("")
        # BUGFIX: per-instance list; the class-level "repos" accumulated
        # duplicate XMLRepo objects across instantiations.
        self.repos = []
        if not os.path.isdir(self.dbPath):
            os.makedirs(self.dbPath)
        if not os.path.isdir(self.jobPath):
            os.mkdir(self.jobPath)
        if not os.path.isdir(self.xml_path):
            os.makedirs(self.xml_path)
        for label in self.labels:
            if not os.path.exists(self.xml_path + label + ".xml"):
                self._fetchXML(label)
            self.repos.append(XMLRepo(label + ".xml", self.xml_path, self.pk))

    def _getJobCachePath(self, applyList):
        # The job signature is a stable string built from the apply list,
        # hashed to form the cache directory name.
        applyStr = '\0'.join(['%s=%s[%s]--%s[%s]%s' % (x[0], x[1][0], x[1][1], x[2][0], x[2][1], x[3]) for x in applyList])
        return self.jobPath + '/' + sha1helper.sha1ToString(sha1helper.sha1String(applyStr))

    def checkCachedUpdateJob(self, applyList):
        """Return the cached job path for *applyList*, or None if absent."""
        jobPath = self._getJobCachePath(applyList)
        log.info("CheckjobPath %s" % jobPath)
        if os.path.exists(jobPath):
            return jobPath

    def cacheUpdateJob(self, applyList, updJob):
        """Freeze *updJob* under the cache path derived from *applyList*,
        replacing any previously cached job."""
        jobPath = self._getJobCachePath(applyList)
        log.info("jobPath %s" % jobPath)
        if os.path.exists(jobPath):
            log.info("deleting the JobPath %s "% jobPath)
            util.rmtree(jobPath)
            log.info("end deleting the JobPath %s "% jobPath)
        log.info("making the logPath ")
        os.mkdir(jobPath)
        log.info("freeze JobPath")
        updJob.freeze(jobPath)
        log.info("end freeze JobPath")

    def convertTroveToDict(self, troveTupleList):
        """Convert (name, version, flavor) trove tuples into dicts with
        "name" / "version" / "label" keys."""
        mList = []
        for troveTuple in troveTupleList:
            pkg = {}
            pkg["name"] = troveTuple[0]
            pkg["version"] = troveTuple[1].trailingRevision()
            pkg["label"] = troveTuple[1].trailingLabel()
            mList.append(pkg)
        return mList

    def searchByGroups(self, groups):
        # not implemented yet
        pass

    def refresh(self):
        """Re-download/regenerate the XML file for every label."""
        for label in self.labels:
            self._fetchXML(label)

    def resolve(self, name):
        """Return the first repository's match for *name*, or None.

        BUGFIX: the original returned None after consulting only the
        first repository; now every repository is tried in order.
        """
        for repo in self.repos:
            r = repo.resolve(name)
            if r:
                return r
        return None

    def search(self, search, where="name"):
        """
        @where (string) values = name | details | group | all
        """
        repositories_result = []
        log.info("%s " % search)
        # BUGFIX: the original tested "search is not list", which is true
        # for every non-type object, so a list argument got wrapped into a
        # nested list and each repo was queried with a list, not a string.
        if not isinstance(search, list):
            search = [search]
        for repo in self.repos:
            for s in search:
                results = repo.search(s, where)
                for i in results:
                    repositories_result.append(i)
        return self.list_set(repositories_result)

    def resolve_list(self, search_list):
        """Resolve a whole list of names across all repositories."""
        r = []
        for repo in self.repos:
            res = repo.resolve_list(search_list)
            for i in res:
                r.append(i)
        return self.list_set(r)

    def list_set(self, repositories_result):
        """Collapse duplicate package names, keeping the first occurrence."""
        names = set([i["name"] for i in repositories_result])
        results = []
        for i in repositories_result:
            if i["name"] in names:
                results.append(i)
                names.remove(i["name"])
        return results

    def _fetchXML(self, label):
        """Fetch (pregenerated labels) or locally generate the XML metadata
        file for one label."""
        log.info("Updating XMLCache for label %s" % label)
        filename = label + '.xml'
        filepath = self.xml_path + filename
        if label in self.pregenerated_XML_labels:
            wwwfile = self.server + filename
            try:
                wget = url.urlopen(wwwfile)
                openfile = open(filepath, 'w')
                openfile.writelines(wget.readlines())
                openfile.close()
            except Exception:
                # BUGFIX: narrowed from a bare "except:" so SystemExit /
                # KeyboardInterrupt are no longer swallowed.
                self.pk.error(ERROR_NO_NETWORK, "%s can not open" % wwwfile)
        else:
            generateXML.init(label, filepath, self.conarypk)

    def getGroup(self, categorieList):
        return getGroup(categorieList)

    def _getCategorieBase(self, mapDict, categorieList):
        """Return the most frequent mapped category for *categorieList*,
        or None when the list is empty."""
        if not categorieList:
            return None
        tempDict = {}
        for cat in categorieList:
            # "in" instead of deprecated has_key(); "mapped" instead of
            # "map", which shadowed the builtin.
            if cat in mapDict:
                mapped = mapDict[cat]
            else:
                continue
            if mapped in tempDict:
                tempDict[mapped] = tempDict[mapped] + 1
            else:
                tempDict[mapped] = 1
        tmp = 0
        t_key = ""
        for key, value in tempDict.items():
            if value > tmp:
                t_key = key
                tmp = value
        return t_key

    def _getAllCategories(self):
        """Collect the set of all category strings across all repos."""
        categories = []
        for i in self.repos:
            pkgs = i._getAllPackages()
            for pkg in pkgs:
                if 'category' in pkg:
                    for cat in pkg["category"]:
                        categories.append(cat)
        categories.sort()
        return set(categories)
class XMLCache:
    """On-disk cache of per-label package metadata stored as XML files.

    One XMLRepo is created per configured conary label; queries fan out
    over all repos and duplicate package names are collapsed by list_set().
    """

    # Let's only get XML data from things that we support.
    # XXX We really should replace this with the Conary
    # RESTful API real soon now.
    pregenerated_XML_labels = (
        'conary.rpath.com@rpl:2-qa',
        'foresight.rpath.org@fl:2',
        'foresight.rpath.org@fl:2-qa',
        'foresight.rpath.org@fl:2-devel',
        'foresight.rpath.org@fl:2-kernel',
        'foresight.rpath.org@fl:2-qa-kernel',
        'foresight.rpath.org@fl:2-devel-kernel',
    )

    server = "http://packages.foresightlinux.org/cache/"
    # kept for backward compatibility; instances now use a per-instance list
    repos = []
    dbPath = '/var/cache/conary/'
    jobPath = dbPath + 'jobs'
    xml_path = dbPath + "xmlrepo/"

    def __init__(self):
        self.conarypk = ConaryPk()
        # BUGFIX: store a list -- the original generator expression was
        # exhausted by the loop below, so refresh() saw an empty iterable.
        self.labels = list(self.conarypk.get_labels_from_config())
        self.pk = PackageKitBaseBackend("")
        # BUGFIX: per-instance list; the class-level "repos" accumulated
        # duplicate XMLRepo objects across instantiations.
        self.repos = []
        if not os.path.isdir(self.dbPath):
            os.makedirs(self.dbPath)
        if not os.path.isdir(self.jobPath):
            os.mkdir(self.jobPath)
        if not os.path.isdir(self.xml_path):
            os.makedirs(self.xml_path)
        for label in self.labels:
            if not os.path.exists(self.xml_path + label + ".xml"):
                self._fetchXML(label)
            self.repos.append(XMLRepo(label + ".xml", self.xml_path, self.pk))

    def _getJobCachePath(self, applyList):
        # Stable string signature of the apply list, hashed into a
        # directory name under jobPath.
        applyStr = '\0'.join([
            '%s=%s[%s]--%s[%s]%s' % (x[0], x[1][0], x[1][1], x[2][0], x[2][1], x[3])
            for x in applyList
        ])
        return self.jobPath + '/' + sha1helper.sha1ToString(
            sha1helper.sha1String(applyStr))

    def checkCachedUpdateJob(self, applyList):
        """Return the cached job path for *applyList*, or None if absent."""
        jobPath = self._getJobCachePath(applyList)
        log.info("CheckjobPath %s" % jobPath)
        if os.path.exists(jobPath):
            return jobPath

    def cacheUpdateJob(self, applyList, updJob):
        """Freeze *updJob* under the cache path derived from *applyList*,
        replacing any previously cached job."""
        jobPath = self._getJobCachePath(applyList)
        log.info("jobPath %s" % jobPath)
        if os.path.exists(jobPath):
            log.info("deleting the JobPath %s " % jobPath)
            util.rmtree(jobPath)
            log.info("end deleting the JobPath %s " % jobPath)
        log.info("making the logPath ")
        os.mkdir(jobPath)
        log.info("freeze JobPath")
        updJob.freeze(jobPath)
        log.info("end freeze JobPath")

    def convertTroveToDict(self, troveTupleList):
        """Convert (name, version, flavor) trove tuples into dicts with
        "name" / "version" / "label" keys."""
        mList = []
        for troveTuple in troveTupleList:
            pkg = {}
            pkg["name"] = troveTuple[0]
            pkg["version"] = troveTuple[1].trailingRevision()
            pkg["label"] = troveTuple[1].trailingLabel()
            mList.append(pkg)
        return mList

    def searchByGroups(self, groups):
        # not implemented yet
        pass

    def refresh(self):
        """Re-download/regenerate the XML file for every label."""
        for label in self.labels:
            self._fetchXML(label)

    def resolve(self, name):
        """Return the first repository's match for *name*, or None.

        BUGFIX: the original returned None after consulting only the
        first repository; now every repository is tried in order.
        """
        for repo in self.repos:
            r = repo.resolve(name)
            if r:
                return r
        return None

    def search(self, search, where="name"):
        """
        @where (string) values = name | details | group
        """
        repositories_result = []
        for repo in self.repos:
            results = repo.search(search, where)
            for i in results:
                repositories_result.append(i)
        return self.list_set(repositories_result)

    def resolve_list(self, search_list):
        """Resolve a whole list of names across all repositories."""
        r = []
        for repo in self.repos:
            res = repo.resolve_list(search_list)
            for i in res:
                r.append(i)
        return self.list_set(r)

    def list_set(self, repositories_result):
        """Collapse duplicate package names, keeping the first occurrence."""
        names = set([i["name"] for i in repositories_result])
        results = []
        for i in repositories_result:
            if i["name"] in names:
                results.append(i)
                names.remove(i["name"])
        return results

    def _fetchXML(self, label):
        """Fetch (pregenerated labels) or locally generate the XML metadata
        file for one label."""
        log.info("Updating XMLCache for label %s" % label)
        filename = label + '.xml'
        filepath = self.xml_path + filename
        if label in self.pregenerated_XML_labels:
            wwwfile = self.server + filename
            try:
                wget = url.urlopen(wwwfile)
                openfile = open(filepath, 'w')
                openfile.writelines(wget.readlines())
                openfile.close()
            except Exception:
                # BUGFIX: narrowed from a bare "except:" so SystemExit /
                # KeyboardInterrupt are no longer swallowed.
                self.pk.error(ERROR_NO_NETWORK, "%s can not open" % wwwfile)
        else:
            generateXML.init(label, filepath, self.conarypk)

    def getGroup(self, categorieList):
        return getGroup(categorieList)

    def _getCategorieBase(self, mapDict, categorieList):
        """Return the most frequent mapped category for *categorieList*,
        or None when the list is empty."""
        if not categorieList:
            return None
        tempDict = {}
        for cat in categorieList:
            # "in" instead of deprecated has_key(); "mapped" instead of
            # "map", which shadowed the builtin.
            if cat in mapDict:
                mapped = mapDict[cat]
            else:
                continue
            if mapped in tempDict:
                tempDict[mapped] = tempDict[mapped] + 1
            else:
                tempDict[mapped] = 1
        tmp = 0
        t_key = ""
        for key, value in tempDict.items():
            if value > tmp:
                t_key = key
                tmp = value
        return t_key

    def _getAllCategories(self):
        """Collect the set of all category strings across all repos."""
        categories = []
        for i in self.repos:
            pkgs = i._getAllPackages()
            for pkg in pkgs:
                if 'category' in pkg:
                    for cat in pkg["category"]:
                        categories.append(cat)
        categories.sort()
        return set(categories)