# Assumes the surrounding module provides json, os and re plus the project
# helpers RemoteSpecParser, Repos, IPMap and inverseMap used below.
def readPkgDBCommits(self, deps):
	"""
	For a list of dependencies, get the corresponding commits from rawhide.
	1) Transform each import path into a package name
	2) For each package get its rawhide commit
	3) Return a mapping of rawhide commits, one per dependency
	"""
	commits = {}
	for ip in deps:
		pkgname = self.importpath2pkgname(ip)
		# cache (disabled)
		#json_deps = []
		#with open("/tmp/test/cache.json", 'r') as file:
		#	json_deps = json.loads(file.read())
		#if ip in json_deps:
		#	commits[ip] = json_deps[ip]
		#	continue
		rsp = RemoteSpecParser("master", pkgname)
		rawhide_commit = ""
		#if not rsp.parse():
		#	print "Unable to get commit for %s" % pkgname
		if self.verbose:
			print "Retrieving %s ..." % pkgname
		if rsp.parse():
			rawhide_commit = rsp.getPackageCommits()
		# print each import path with its resolved commit
		print "\"%s\": \"%s\"," % (ip, rawhide_commit)
		commits[ip] = rawhide_commit
	return commits

def updatePackages(self, outdated_packages):
	err = []
	if outdated_packages == []:
		err.append("No outdated packages to update")
		return err, False

	# update import paths
	mapping = IPMap().loadIMap()
	for pkg in outdated_packages:
		rsp_obj = RemoteSpecParser('master', pkg)
		if not rsp_obj.parse():
			continue
		provides = rsp_obj.getProvides()
		imap = inverseMap(provides)
		for arg in imap:
			for image in imap[arg]:
				mapping[arg] = (pkg, image)

	if not IPMap().saveIMap(mapping):
		err.append("Unable to save mapping of import paths of updated packages")
		return err, False

	IPMap().flush()
	return err, True
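
# A minimal calling sketch (assumed context, not taken from this file): like
# addPackages below, updatePackages returns a list of error strings plus a
# success flag, so a caller checks the flag and reports the errors; the
# instance name "manager" is a placeholder.
#
#	errs, ok = manager.updatePackages(["golang-github-davecgh-go-spew"])
#	if not ok:
#		print "\n".join(errs)
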
def readPkgDBCommits(self, deps):
	"""
	For a list of dependencies, get the corresponding commits from rawhide.
	1) Transform each import path into a package name
	2) For each package get its rawhide commit
	3) Return a mapping of rawhide commits, one per dependency
	"""
	commits = {}
	for ip in deps:
		pkgname = self.importpath2pkgname(ip)
		# cache: reuse a commit recorded in the local cache file if present
		json_deps = []
		with open("/tmp/test/cache.json", 'r') as file:
			json_deps = json.loads(file.read())
		if ip in json_deps:
			commits[ip] = json_deps[ip]
			continue

		rsp = RemoteSpecParser("master", pkgname)
		rawhide_commit = ""
		#if not rsp.parse():
		#	print "Unable to get commit for %s" % pkgname
		if self.verbose:
			print "Retrieving %s ..." % pkgname
		if rsp.parse():
			rawhide_commit = rsp.getPackageCommits()
		# print each import path with its resolved commit
		print "\"%s\": \"%s\"," % (ip, rawhide_commit)
		commits[ip] = rawhide_commit
	return commits
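
# A minimal usage sketch (assumed context): readPkgDBCommits maps Go import
# paths to rawhide commit hashes, consulting /tmp/test/cache.json first; the
# import path and the returned hash below are illustrative only.
#
#	commits = manager.readPkgDBCommits(["github.com/davecgh/go-spew/spew"])
#	# commits == {"github.com/davecgh/go-spew/spew": "<rawhide commit hash>"}
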
def addPackages(self, new_packages):
	err = []
	if new_packages == []:
		err.append("No new packages to add")
		return err, False

	# update golang.repos
	new_repos = {}
	checked_packages = []
	for pkg in new_packages:
		rsp_obj = RemoteSpecParser('master', pkg)
		if not rsp_obj.parse():
			continue
		url = rsp_obj.getPkgURL()
		if url == "":
			err.append("Unable to get URL tag from %s's spec file" % pkg)
			continue
		# remove all / at the end of url
		url = re.sub(r'[/]+$', '', url)
		# change http to https
		url = re.sub(r'^http://', 'https://', url)
		if not url.startswith('https://'):
			url = "https://%s" % url
		# remove godoc.org/
		url = re.sub(r'https://godoc.org/', 'https://', url)
		checked_packages.append(pkg)
		# BUILD go-spew https://github.com/davecgh/go-spew.git
		dir = os.path.basename(url)
		# git or hg?
		repo = ""
		# github
		if url.startswith("https://github.com"):
			repo = "%s.git" % url
		# bitbucket, googlecode, ...
		else:
			repo = url
		new_repos[pkg] = (dir, repo)

	new_packages = checked_packages

	# get current packages
	curr_pkgs = self.loadPackages()
	for pkg in new_packages:
		curr_pkgs.append(pkg)

	# add new packages
	if not self.savePackages(sorted(curr_pkgs)):
		err.append("Unable to save new packages")
		return err, False

	repos = Repos().loadRepos()
	for repo in new_repos:
		repos[repo] = new_repos[repo]

	if not Repos().saveRepos(repos):
		err.append("Unable to save new repositories")
		return err, False

	# update import paths
	mapping = IPMap().loadIMap()
	for pkg in new_packages:
		rsp_obj = RemoteSpecParser('master', pkg)
		if not rsp_obj.parse():
			continue
		provides = rsp_obj.getProvides()
		imap = inverseMap(provides)
		for arg in imap:
			for image in imap[arg]:
				mapping[arg] = (pkg, image)

	if not IPMap().saveIMap(mapping):
		err.append("Unable to save mapping of import paths of new packages")
		return err, False

	self.flush()
	Repos().flush()
	IPMap().flush()
	return err, True
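
# Worked example of the URL normalization in addPackages (the spec URL is an
# assumption, chosen only to illustrate each rewrite step):
#
#	"http://godoc.org/github.com/davecgh/go-spew/"
#	-> strip trailing slashes: "http://godoc.org/github.com/davecgh/go-spew"
#	-> force https:            "https://godoc.org/github.com/davecgh/go-spew"
#	-> drop godoc.org/:        "https://github.com/davecgh/go-spew"
#	dir  = "go-spew"
#	repo = "https://github.com/davecgh/go-spew.git"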