def setUp(self):
    ''' Creates a fresh repo dir and config file for every test '''
    self.repo = mkdtemp(prefix='local-repo-test-repo-')
    # mkstemp hands back an open OS-level file descriptor; close it
    # explicitly, otherwise every test run leaks one descriptor
    import os
    fd, self.conf = mkstemp(prefix='local-repo-test-conf-')
    os.close(fd)
    with open(self.conf, 'w') as f:
        f.write(ConfigTest.CONFIG.format(path=self.repo))
    Config.init('test', path=self.conf)
def setUp(self):
    ''' Prepares a temporary repo and points all log paths into it '''
    self.repo = mkdtemp(prefix='local-repo-test-repo-')
    data = join(self.repo, '.data')
    self.log = join(data, 'log', 'some', 'path')
    self.buildlog = join(data, 'buildlog', 'path', 'yay')
    self.pkgbuild = join(data, 'pkgbuild', 'test')
    Config.init('logtest')
    for option, value in (('path', self.repo),
                          ('log', self.log),
                          ('buildlog', self.buildlog),
                          ('pkgbuild', self.pkgbuild)):
        Config.set(option, value)
def init(path, config_file=Config.CONFIGFILE):
    ''' Needs the path to repo, or the repo name if specified in the config file '''
    try:
        Config.init(path, config_file)
        repo = Repo(Config.get('path', path))
        LocalRepo._repo = repo
        # all three log facilities are anchored at the repo directory
        for facility in (Log, BuildLog, PkgbuildLog):
            facility.init(repo.path)
    except LocalRepoError as e:
        LocalRepo.error(e)
def test_make_package(self):
    ''' Checks the makepkg command line for each sign/buildlog setting '''
    Config.init('mytestrepo')
    Config.set('sign', False)
    Config.set('buildlog', '')
    # (config changes, make_package kwargs, expected command line)
    scenarios = (
        ({}, {}, '/usr/bin/makepkg -d --nosign'),
        ({}, {'force': True}, '/usr/bin/makepkg -d -f --nosign'),
        ({'buildlog': '/some/path'}, {}, '/usr/bin/makepkg -d -L -m --nosign'),
        ({'sign': True}, {}, '/usr/bin/makepkg -d -L -m --sign'),
    )
    for settings, kwargs, expected in scenarios:
        for option, value in settings.items():
            Config.set(option, value)
        Pacman.make_package('/tmp', **kwargs)
        self.assertEqual(expected, PacmanTest.cmd)
def _install_deps(names):
    ''' Installs missing dependencies

    Asks the user first; returns True after installing, False if the
    user chose to retry without installing the dependencies. '''
    aurs, officials = [], names
    Msg.info(_('Need following packages as dependencies:'))
    if Config.get('check-deps-from-aur', True):
        aur_checks = Pacman.check_from_aur(names)
        aurs = [k for k in aur_checks if aur_checks[k]]
        officials = [k for k in aur_checks if not aur_checks[k]]
        for label, pkgs in (('AUR', aurs), ('Official', officials)):
            if pkgs:
                # translate the template FIRST, then fill it in --
                # formatting before _() would make the catalog lookup
                # miss, because the formatted string is never a msgid
                Msg.info(_('{0} repository: [{1}]').format(
                    label, ', '.join(pkgs)))
    else:
        Msg.info(_('[{0}]').format(', '.join(names)))
    if not Msg.ask(_('Install?')):
        if Msg.ask(_('Try without installing dependencies?')):
            return False
        Msg.info(_('Bye'))
        LocalRepo.shutdown(1)
    try:
        if aurs:
            LocalRepo.aur_add(aurs)
        Pacman.install(officials, as_deps=True)
        return True
    except LocalRepoError as e:
        LocalRepo.error(e)
def test_get(self):
    ''' Config.get returns parsed values and honors the default argument '''
    # boolean options come back as real bools, checked by identity
    self.assertIs(True, Config.get('sign'))
    self.assertIs(False, Config.get('signdb'))
    for option, expected in (('path', self.repo),
                             ('log', '.repo/log'),
                             ('buildlog', '/path/to/buildlog'),
                             ('no-aur-upgrade', ['pkg1', 'pkg2', 'pkg3'])):
        self.assertEqual(expected, Config.get(option))
    # unknown options yield None unless a default is supplied
    self.assertIs(None, Config.get('pkgbuild'))
    self.assertEqual('default', Config.get('pkgbuild', 'default'))
def rebuild(names):
    ''' Rebuilds the specified packages '''
    # rebuilding needs the stored PKGBUILDs, so the option is mandatory
    if not Config.get('pkgbuild', False):
        LocalRepo.error(_('Please specify \'pkgbuild\' in your config file!'))
    pkgbuild_dirs = [PkgbuildLog.log_dir(name) for name in names]
    LocalRepo.add(pkgbuild_dirs, force=True)
def _repo_script(script, db, pkgs):
    ''' Calls one of the repo- scripts '''
    cmd = [script, db] + list(pkgs)
    # database signing is opt-in via the signdb option
    if Config.get('signdb', False):
        cmd.extend(['--verify', '--sign'])
    Pacman.call(cmd)
def __init__(self, path):
    ''' Creates a repo object and loads the package list '''
    self._db = self.find_db(path)
    self._path = dirname(self._db)
    self._packages = {}
    cache = Config.get('cache', Repo.CACHE)
    # a relative cache dir is anchored at the repo directory
    self._cache = cache if isabs(cache) else join(self._path, cache)
def __init__(self, path):
    """ Creates a repo object and loads the package list """
    database = self.find_db(path)
    self._db = database
    self._path = dirname(database)
    self._packages = {}
    cache_dir = Config.get("cache", Repo.CACHE)
    if not isabs(cache_dir):
        # resolve relative cache locations against the repo directory
        cache_dir = join(self._path, cache_dir)
    self._cache = cache_dir
def from_pkgbuild(path, ignore_deps=False):
    ''' Makes a package from a pkgbuild

    Raises BuildError if no PKGBUILD or no built package is found,
    DependencyError on unresolved deps (unless ignore_deps), and
    propagates PacmanError from the makepkg call. '''
    path = abspath(path)
    if basename(path) != Package.PKGBUILD:
        path = join(path, Package.PKGBUILD)
    if not isfile(path):
        raise BuildError(_('Could not find PKGBUILD: {0}').format(path))
    info = PkgbuildParser(path).parse()
    if not ignore_deps:
        unresolved = Pacman.check_deps(info['depends'] + info['makedepends'])
        if unresolved:
            raise DependencyError(path, unresolved)
    path = dirname(path)
    log = bool(Config.get('buildlog', False))
    if Config.get('pkgbuild', False):
        path = Package._load_pkgbuild(info['name'], path)
    pkgfile = None
    try:
        Pacman.make_package(path, log=log)
    finally:
        # store build logs even when makepkg failed, but do NOT
        # return or raise in here: a return/raise inside ``finally``
        # would silently replace a pending PacmanError
        for f in (f for f in listdir(path) if f.startswith(info['name'])):
            if log and f.endswith(Package.LOGEXT):
                BuildLog.store(info['name'], join(path, f))
            elif f.endswith(Package.EXT):
                pkgfile = f
    if pkgfile:
        return Package.from_file(join(path, pkgfile))
    raise BuildError(_('Could not find any package'))
def make_package(path, force=False):
    ''' Calls makepkg

    Changes into ``path`` first; raises PacmanError if that fails. '''
    try:
        chdir(path)
    except OSError:
        # a bare except would also swallow KeyboardInterrupt/SystemExit
        raise PacmanError(
            _('Could not change working directory: {0}').format(path))
    cmd = [Pacman.MAKEPKG, '-d']
    if force:
        cmd.append('-f')
    if Config.get('buildlog', False):
        cmd += ['-L', '-m']
    cmd.append('--sign' if Config.get('sign', False) else '--nosign')
    Pacman.call(cmd)
def init(repo_path):
    ''' Sets the path and opens the log file

    Relative log paths are anchored at ``repo_path``; raises LogError
    when the file cannot be opened. '''
    Log._path = Config.get('log', Log.FILENAME)
    if not isabs(Log._path):
        Log._path = join(repo_path, Log._path)
    logdir = dirname(Log._path)
    try:
        if not isdir(logdir):
            makedirs(logdir, mode=0o755, exist_ok=True)
        Log._file = open(Log._path, 'a')
    except OSError:
        # narrow handler: a bare except would also hide interrupts
        raise LogError(_('Could not open log file: {0}').format(Log._path))
def init(repo_path):
    """ Sets the path and opens the log file

    Relative paths are resolved against ``repo_path``; raises LogError
    if the log file cannot be opened. """
    Log._path = Config.get("log", Log.FILENAME)
    if not isabs(Log._path):
        Log._path = join(repo_path, Log._path)
    try:
        # exist_ok makes the isdir pre-check unnecessary
        makedirs(dirname(Log._path), mode=0o755, exist_ok=True)
        Log._file = open(Log._path, "a")
    except OSError:
        # bare except replaced: it would also trap SystemExit etc.
        raise LogError(_("Could not open log file: {0}").format(Log._path))
def make_package(path, force=False):
    ''' Calls makepkg

    Raises PacmanError when the working directory cannot be changed. '''
    try:
        chdir(path)
    except OSError:
        # catch only OS failures; bare except also traps interrupts
        raise PacmanError(
            _('Could not change working directory: {0}').format(path))
    cmd = [Pacman.MAKEPKG, '-d']
    if force:
        cmd.append('-f')
    if Config.get('buildlog', False):
        cmd += ['-L', '-m']
    if Config.get('sign', False):
        cmd.append('--sign')
    else:
        cmd.append('--nosign')
    Pacman.call(cmd)
def test_repo_remove(self):
    ''' repo-remove command line must honor the signdb option '''
    Config.init('mytestrepo')
    cases = (
        (False, 'my.db.tar.gz', ['pkg1', 'pkg2'],
         '/usr/bin/repo-remove my.db.tar.gz pkg1 pkg2'),
        (True, 'db', ['pkg1'],
         '/usr/bin/repo-remove db pkg1 --verify --sign'),
    )
    for signdb, db, pkgs, expected in cases:
        Config.set('signdb', signdb)
        Pacman.repo_remove(db, pkgs)
        self.assertEqual(expected, PacmanTest.cmd)
def _process_pkgbuild(path):
    ''' Parses the PKGBUILD and stores or loads it in/from the pkgbuild dir '''
    info = PkgbuildParser(path).parse()
    path = dirname(path)
    # no pkgbuild dir configured: nothing to archive, build in place
    if not Config.get('pkgbuild', False):
        return path, info
    if not path.startswith(PkgbuildLog.log_dir(info['name'])):
        # PKGBUILD comes from outside the pkgbuild dir: archive a copy
        # and keep building from the original location
        PkgbuildLog.store(info['name'], path)
        return path, info
    # building from the archived PKGBUILD: work on a temporary copy so
    # the stored version stays untouched
    tmpdir = join(Package.get_tmpdir(), info['name'])
    PkgbuildLog.load(info['name'], tmpdir)
    return tmpdir, info
def _process_build_output(name, path):
    ''' Stores buildlogs and finds the package file '''
    try:
        candidates = [f for f in listdir(path) if f.startswith(name)]
    except OSError:
        raise BuildError(_('Could not list directory: {0}').format(path))
    keep_logs = Config.get('buildlog', False)
    pkgfile = None
    for entry in candidates:
        if keep_logs and entry.endswith(Package.LOGEXT):
            BuildLog.store(name, join(path, entry))
        elif entry.endswith(Package.EXT):
            pkgfile = entry
    return pkgfile
def find_db(self, path):
    """ Finds the repo database """
    path = abspath(path)
    if path.endswith(Repo.EXT):
        return path
    if path.endswith(Repo.LINKEXT):
        return splitext(path)[0] + Repo.EXT
    if not isdir(path):
        raise DbError(_("Could not find database: {0}").format(path))
    try:
        for entry in listdir(path):
            if entry.endswith(Repo.EXT):
                return join(path, entry)
    except OSError:
        raise DbError(_("Could not list directory: {0}").format(path))
    # no database file yet: derive its name from the configured repo
    return join(path, Config.get("reponame") + Repo.EXT)
def find_db(self, path):
    ''' Finds the repo database '''
    path = abspath(path)
    if path.endswith(Repo.EXT):
        return path
    if path.endswith(Repo.LINKEXT):
        return splitext(path)[0] + Repo.EXT
    if not isdir(path):
        raise DbError(_('Could not find database: {0}').format(path))
    try:
        entries = listdir(path)
    except OSError:
        raise DbError(_('Could not list directory: {0}').format(path))
    found = next((f for f in entries if f.endswith(Repo.EXT)), None)
    if found is None:
        # directory has no database yet: name one after the repo
        return join(path, Config.get('reponame') + Repo.EXT)
    return join(path, found)
def aur_upgrade():
    ''' Upgrades all packages from the AUR

    Skips packages listed in the no-aur-upgrade option and asks the
    user before adding any update to the repo. '''
    pkgs = [
        pkg for pkg in LocalRepo._repo
        if pkg not in Config.get('no-aur-upgrade', [])
    ]
    Msg.info(_('{0} packages found').format(len(pkgs)))
    Log.log(_('Starting an AUR upgrade'))
    # ``len(pkgs) is 0`` compared identity, not value; use truthiness
    if not pkgs:
        Msg.info(_('Nothing to do'))
        return
    Msg.process(_('Retrieving package info from the AUR'))
    pkgs, errors = Aur.packages(pkgs)
    for e in errors:
        Msg.error(e)
    Msg.info(_('{0} packages found').format(len(pkgs)))
    Msg.process(_('Checking for updates'))
    updates = []
    for name, pkg in ((name, pkg) for name, pkg in pkgs.items()
                      if name in LocalRepo._repo):
        oldpkg = LocalRepo._repo[name]
        if oldpkg.has_smaller_version_than(pkg['version']):
            updates.append(pkg)
            Msg.result('{0} ({1} -> {2})'.format(name, oldpkg.version,
                                                 pkg['version']))
    if not updates:
        Msg.info(_('All packages are up to date'))
        return
    if not Msg.ask(_('Upgrade?')):
        Msg.info(_('Bye'))
        LocalRepo.shutdown(1)
    LocalRepo.add([pkg['uri'] for pkg in updates], force=True)
def _make_package(path, force=False):
    ''' Makes a new package

    On a DependencyError, offers to install the missing dependencies
    and retries the build while ignoring the dependency check. '''
    Msg.process(_('Forging a new package: {0}').format(path))
    Log.log(_('Forging a new package: {0}').format(path))
    try:
        return Package.forge(path, force=force)
    except DependencyError as e:
        installed_deps = LocalRepo._install_deps(e.deps)
        try:
            pkg = Package.from_pkgbuild(e.pkgbuild, ignore_deps=True,
                                        force=force)
        except LocalRepoError as err:
            # do NOT rebind ``e`` here: Python 3 deletes the except
            # target when the handler ends, which would break the
            # ``e.deps`` access below with a NameError
            LocalRepo.error(err)
        if Config.get('uninstall-deps', True) and installed_deps:
            LocalRepo._uninstall_deps(e.deps)
        return pkg
    except LocalRepoError as e:
        LocalRepo.error(e)
def aur_upgrade():
    ''' Upgrades all packages from the AUR

    Packages named in the no-aur-upgrade option are left alone; the
    user confirms before anything is rebuilt. '''
    skip = Config.get('no-aur-upgrade', [])
    pkgs = [pkg for pkg in LocalRepo._repo if pkg not in skip]
    Msg.info(_('{0} packages found').format(len(pkgs)))
    Log.log(_('Starting an AUR upgrade'))
    # fixed: ``len(pkgs) is 0`` relied on int identity, not equality
    if not pkgs:
        Msg.info(_('Nothing to do'))
        return
    Msg.process(_('Retrieving package info from the AUR'))
    pkgs, errors = Aur.packages(pkgs)
    for e in errors:
        Msg.error(e)
    Msg.info(_('{0} packages found').format(len(pkgs)))
    Msg.process(_('Checking for updates'))
    updates = []
    for name, pkg in ((name, pkg) for name, pkg in pkgs.items()
                      if name in LocalRepo._repo):
        oldpkg = LocalRepo._repo[name]
        if oldpkg.has_smaller_version_than(pkg['version']):
            updates.append(pkg)
            Msg.result('{0} ({1} -> {2})'.format(name, oldpkg.version,
                                                 pkg['version']))
    if not updates:
        Msg.info(_('All packages are up to date'))
        return
    if not Msg.ask(_('Upgrade?')):
        Msg.info(_('Bye'))
        LocalRepo.shutdown(1)
    LocalRepo.add([pkg['uri'] for pkg in updates], force=True)
def test_save(self):
    ''' Saved settings must survive a reload from disk '''
    Config.init('test2', path=self.conf)
    changes = {'path': self.repo, 'sign': True, 'log': '/some/fancy/path'}
    for option, value in changes.items():
        Config.set(option, value)
    Config.save(path=self.conf)
    # re-initialize from the file and verify everything round-tripped
    Config.init('test2', path=self.conf)
    self.assertEqual(self.repo, Config.get('path'))
    self.assertIs(True, Config.get('sign'))
    self.assertEqual('/some/fancy/path', Config.get('log'))
def init(repo_path):
    """ Sets the path """
    configured = Config.get("pkgbuild", PkgbuildLog.DIRNAME)
    # anchor relative pkgbuild dirs at the repo directory
    PkgbuildLog._path = (configured if isabs(configured)
                         else join(repo_path, configured))
def init(repo_path):
    """ Sets the path """
    configured = Config.get("buildlog", BuildLog.DIRNAME)
    # relative buildlog dirs live inside the repo directory
    BuildLog._path = (configured if isabs(configured)
                      else join(repo_path, configured))
def init(repo_path):
    ''' Sets the path '''
    path = Config.get('buildlog', BuildLog.DIRNAME)
    if not isabs(path):
        # make relative locations repo-local
        path = join(repo_path, path)
    BuildLog._path = path
def test_find_repo_by_path(self):
    ''' Maps filesystem paths back to their configured repo '''
    for path, expected in ((self.repo, 'test'),
                           ('/home/something', '/home')):
        self.assertEqual(expected, Config.find_repo_by_path(path))
def test_normalize_path(self):
    ''' Every spelling of the repo location normalizes to the repo dir '''
    variants = [join(self.repo, name)
                for name in ('test.db.tar.gz', 'test.db', '')]
    variants.append(self.repo)
    for variant in variants:
        self.assertEqual(self.repo, Config.normalize_path(variant))
def test_remove(self):
    ''' Removed options fall back to their defaults '''
    # (option, assertion method, value expected after removal)
    for option, check, expected in (('sign', self.assertIs, False),
                                    ('log', self.assertEqual, '.log'),
                                    ('buildlog', self.assertIs, None),
                                    ('no-aur-upgrade', self.assertIs, None)):
        Config.remove(option)
        check(expected, Config.get(option))
def test_set(self):
    ''' Config.set stores values in their canonical form '''
    # plain strings round-trip unchanged
    Config.set('hello', 'world')
    self.assertEqual(Config.get('hello'), 'world')
    # known boolean options keep their bool type (identity check)
    Config.set('sign', False)
    self.assertIs(False, Config.get('sign'))
    # True on this option comes back as the string 'yes'
    Config.set('something', True)
    self.assertEqual(Config.get('something'), 'yes')
    # lists of strings round-trip as lists
    Config.set('no-aur-upgrade', ['pkg1', 'pkg2'])
    self.assertEqual(Config.get('no-aur-upgrade'), ['pkg1', 'pkg2'])
    # other lists are flattened into a space-separated string
    Config.set('something', [1, 2, 3])
    self.assertEqual(Config.get('something'), '1 2 3')
def init(repo_path):
    ''' Sets the path '''
    path = Config.get('pkgbuild', PkgbuildLog.DIRNAME)
    if not isabs(path):
        # relative locations are taken to be inside the repo
        path = join(repo_path, path)
    PkgbuildLog._path = path
def rebuild(names):
    ''' Rebuilds the specified packages '''
    # cannot rebuild without the archived PKGBUILDs
    if not Config.get('pkgbuild', False):
        LocalRepo.error(
            _('Please specify \'pkgbuild\' in your config file!'))
    LocalRepo.add(list(map(PkgbuildLog.log_dir, names)), force=True)