def get_distro():
    """Return a human-readable distribution string, or None if unknown.

    Tries lsb_release first, then /etc/*-release files, then the macOS
    sw_vers tool.
    """
    # try using the lsb_release tool to get the distro info
    try:
        distro = cmds.get_output(["lsb_release", "--short", "--id"]).strip()
        release = cmds.get_output(["lsb_release", "--short", "--release"]).strip()
        codename = cmds.get_output(["lsb_release", "--short", "--codename"]).strip()
        if codename:
            return "%s %s (%s)" % (distro, release, codename)
        else:
            return "%s %s" % (distro, release)
    except (CommandError, IOError):
        pass
    # otherwise, look for a /etc/*-release file
    release_files = ["/etc/redhat-release", "/etc/debian_version"]
    release_files.extend(
        [
            os.path.join("/etc", fname)
            for fname in os.listdir("/etc")
            if fname.endswith("release") and fname != "lsb-release"
        ]
    )
    for filename in release_files:
        if os.path.exists(filename):
            # use a context manager: the original leaked the handle
            # returned by open()
            with open(filename, "r") as release_file:
                return release_file.readline().strip()
    osx = commands.getoutput("sw_vers -productVersion")
    if osx:
        return "Mac OS X " + osx
    # nothing matched
    return None
def get_distro():
    """Return a human-readable distribution string, or None if unknown."""
    # try using the lsb_release tool to get the distro info
    try:
        distro = cmds.get_output(['lsb_release', '--short', '--id']).strip()
        release = cmds.get_output(['lsb_release', '--short', '--release']).strip()
        codename = cmds.get_output(['lsb_release', '--short', '--codename']).strip()
        if codename:
            return '%s %s (%s)' % (distro, release, codename)
        else:
            return '%s %s' % (distro, release)
    except (CommandError, IOError):
        pass
    # otherwise, look for a /etc/*-release file
    release_files = ['/etc/redhat-release', '/etc/debian_version']
    release_files.extend([
        os.path.join('/etc', fname)
        for fname in os.listdir('/etc')
        if fname.endswith('release') and fname != 'lsb-release'
    ])
    for filename in release_files:
        if os.path.exists(filename):
            # with-statement closes the file; the original leaked the
            # handle (and had a stray trailing semicolon further down)
            with open(filename, 'r') as release_file:
                return release_file.readline().strip()
    osx = commands.getoutput('sw_vers -productVersion')
    if osx:
        return 'Mac OS X ' + osx
    # nothing matched
    return None
def get_distro():
    """Return a human-readable distribution string, or None if unknown."""
    # try using the lsb_release tool to get the distro info
    try:
        distro = cmds.get_output(['lsb_release', '--short', '--id']).strip()
        release = cmds.get_output(['lsb_release', '--short', '--release']).strip()
        codename = cmds.get_output(['lsb_release', '--short', '--codename']).strip()
        if codename:
            return '%s %s (%s)' % (distro, release, codename)
        return '%s %s' % (distro, release)
    except (CommandError, IOError):
        pass
    # otherwise, look for a /etc/*-release file
    release_files = ['/etc/redhat-release', '/etc/debian_version']
    release_files.extend([
        os.path.join('/etc', fname)
        for fname in os.listdir('/etc')
        if fname.endswith('release') and fname != 'lsb-release'
    ])
    for filename in release_files:
        if os.path.exists(filename):
            # context manager fixes the leaked file handle
            with open(filename, 'r') as release_file:
                return release_file.readline().strip()
    osx = commands.getoutput('sw_vers -productVersion')
    if osx:
        return 'Mac OS X ' + osx
    # nothing matched
    return None
def has_diverged_from_remote_branch(self, branch):
    """Return True when local HEAD is not the merge base with origin/<branch>."""
    run_args = dict(cwd=self.get_checkoutdir(),
                    extra_env=get_git_extra_env())
    merge_base = get_output(
        ['git', 'merge-base', 'HEAD', 'origin/' + branch], **run_args).strip()
    local_head = get_output(['git', 'rev-parse', 'HEAD'], **run_args).strip()
    return merge_base != local_head
def exists(self):
    """Return True if `svn ls` can list the module's URL."""
    try:
        get_output(['svn', 'ls', self.module], extra_env={
            'LD_LIBRARY_PATH': os.environ.get('UNMANGLED_LD_LIBRARY_PATH'),
        })
        return True
    except CommandError:
        # narrowed from a bare except (matching the git exists() variant)
        # so KeyboardInterrupt and programming errors are not swallowed
        return False
def execute_git_predicate(self, predicate):
    """A git command wrapper for the cases, where only the boolean
    outcome is of interest.
    """
    try:
        get_output(predicate,
                   cwd=self.get_checkoutdir(),
                   extra_env=get_git_extra_env())
        return True
    except CommandError:
        return False
def exists(self):
    """Best-effort reachability test of the remote git repository."""
    # FIXME: Parse output from ls-remote to work out if tag/branch is present
    try:
        get_output(['git', 'ls-remote', self.module],
                   extra_env=get_git_extra_env())
        return True
    except CommandError:
        return False
def tree_id(self):
    """Return `bzr revision-info` for the tree, or None if srcdir is absent."""
    if not os.path.exists(self.srcdir):
        return None
    try:
        # --tree is relatively new (bzr 1.17)
        tree_id = get_output(['bzr', 'revision-info', '--tree'],
                             cwd=self.srcdir).strip()
    except CommandError:
        # narrowed from a bare except (matches the sibling tree_id that
        # catches CommandError): fall back to the pre-1.17 command only
        # when bzr itself rejects --tree
        tree_id = get_output(['bzr', 'revision-info'],
                             cwd=self.srcdir).strip()
    return tree_id
def run(self, config, options, args, help=None):
    """Report git.gnome.org modules with no definition for the target branch."""
    if options.branch:
        branch = options.branch
    else:
        if type(config.moduleset) is list:
            branch = config.moduleset[0].replace(".", "-")
        else:
            branch = config.moduleset.replace(".", "-")
        branch = branch.replace("gnome-suites-", "gnome-")

    module_set = jhbuild.moduleset.load(config)
    for mod in module_set.get_module_list(args or config.modules):
        if mod.type in ("meta", "tarball"):
            continue
        if not mod.branch or mod.branch.repository.__class__.__name__ != "GitRepository":
            continue
        if "git.gnome.org" not in mod.branch.repository.href:
            continue
        if mod.branch.branch:
            # there is already a branch defined
            continue
        remote = "git://git.gnome.org/%s" % mod.name
        try:
            if get_output(["git", "ls-remote", remote,
                           "refs/heads/%s" % branch]):
                uprint(
                    _("%(module)s is missing branch definition for %(branch)s")
                    % {"module": mod.name, "branch": branch}
                )
        except CommandError:
            pass
def run(self, config, options, args, help=None):
    """Flag git.gnome.org modules that lack a branch definition."""
    if options.branch:
        branch = options.branch
    else:
        moduleset = config.moduleset
        if type(moduleset) is list:
            branch = moduleset[0].replace('.', '-')
        else:
            branch = moduleset.replace('.', '-')
        branch = branch.replace('gnome-suites-', 'gnome-')

    module_set = jhbuild.moduleset.load(config)
    module_list = module_set.get_module_list(args or config.modules)
    for mod in module_list:
        if mod.type in ('meta', 'tarball'):
            continue
        repo = mod.branch.repository if mod.branch else None
        if repo is None or repo.__class__.__name__ != 'GitRepository':
            continue
        if 'git.gnome.org' not in repo.href:
            continue
        if mod.branch.branch:
            # a branch is already defined for this module
            continue
        try:
            found = get_output(['git', 'ls-remote',
                                'git://git.gnome.org/%s' % mod.name,
                                'refs/heads/%s' % branch])
            if found:
                uprint(_('%(module)s is missing branch definition for %(branch)s')
                       % {'module': mod.name, 'branch': branch})
        except CommandError:
            pass
def tree_id(self):
    """Return the monotone base revision id, or None on command failure."""
    try:
        revision_info = get_output(['mtn', 'automate', 'get_base_revision_id'],
                                   cwd=self.srcdir)
    except CommandError:
        return None
    return revision_info[0]
def run(self, config, options, args, help=None):
    """Report GNOME git modules missing a definition for the derived branch."""
    if options.branch:
        branch = options.branch
    else:
        if type(config.moduleset) is list:
            branch = config.moduleset[0].replace('.', '-')
        else:
            branch = config.moduleset.replace('.', '-')
        # normalise the various moduleset naming schemes to "gnome-*"
        for prefix in ('gnome-suites-core-deps', 'gnome-suites-core',
                       'gnome-suites-', 'gnome-apps-'):
            branch = branch.replace(prefix, 'gnome-')

    module_set = jhbuild.moduleset.load(config)
    for mod in module_set.get_module_list(args or config.modules):
        if mod.type in ('meta', 'tarball'):
            continue
        if not mod.branch or mod.branch.repository.__class__.__name__ != 'GitRepository':
            continue
        if 'git.gnome.org' not in mod.branch.repository.href:
            continue
        if mod.branch.branch:
            # there is already a branch defined
            continue
        try:
            if get_output(['git', 'ls-remote',
                           'git://git.gnome.org/%s' % mod.name,
                           'refs/heads/%s' % branch]):
                uprint(_('%(module)s is missing branch definition for %(branch)s')
                       % {'module': mod.name, 'branch': branch})
        except CommandError:
            pass
def get_remote_branches_list(self):
    """Return all remote-tracking branch names, whitespace-stripped."""
    listing = get_output(['git', 'branch', '-r'],
                         cwd=self.get_checkoutdir(),
                         extra_env=get_git_extra_env())
    return [line.strip() for line in listing.splitlines()]
def create_mirror(self, buildscript):
    """Create or validate the local bzr mirror for this module."""
    if not self.config.dvcs_mirror_dir:
        return
    if self.config.nonetwork:
        return
    if not os.path.exists(os.path.join(self.config.dvcs_mirror_dir, '.bzr')):
        cmd = ['bzr', 'init-repo', '--no-trees', self.config.dvcs_mirror_dir]
        buildscript.execute(cmd)
    local_mirror = os.path.join(self.config.dvcs_mirror_dir, self.checkoutdir)
    if not os.path.exists(local_mirror):
        cmd = ['bzr', 'init', '--create-prefix', local_mirror]
        buildscript.execute(cmd)
    if os.path.exists(self.srcdir):
        cmd = ['bzr', 'info', self.srcdir]
        cwd = self.config.dvcs_mirror_dir
        try:
            info = get_output(cmd, cwd=cwd)
            if info.find('checkout of branch: %s' % self.checkoutdir) == -1:
                # sentinel: srcdir is not a checkout of our mirror
                raise NameError
        except (CommandError, NameError):
            # narrowed from a bare except: only a failing `bzr info` or
            # the sentinel above mean a bad checkout; KeyboardInterrupt
            # and genuine programming errors must propagate
            raise FatalError(_("""
Path %s does not seem to be a checkout from dvcs_mirror_dir.
Remove it or change your dvcs_mirror_dir settings.""") % self.srcdir)
    else:
        cmd = ['bzr', 'co', '--light', local_mirror, self.srcdir]
        buildscript.execute(cmd)
def _export(self, buildscript):
    """Check out and archive HEAD into <module>-<sha>.zip."""
    self._checkout(buildscript)

    try:
        head = get_output(['git', 'rev-parse', 'HEAD'],
                          cwd=self.get_checkoutdir(), get_stderr=False,
                          extra_env=get_git_extra_env()).strip()
    except CommandError:
        head = 'unknown'

    filename = self.get_module_basename() + '-' + head + '.zip'
    if self.config.export_dir is not None:
        path = os.path.join(self.config.export_dir, filename)
    else:
        path = os.path.join(self.checkoutroot, filename)

    buildscript.execute(['git', 'archive', '-o', path, 'HEAD'],
                        cwd=self.get_checkoutdir(),
                        extra_env=get_git_extra_env())
def real_update(self, buildscript):
    """Update the bzr working tree, honouring lightweight checkouts."""
    # Do not assume that shallow_clone option was the same
    # during checkout as now.
    info = get_output(['bzr', 'info', self.srcdir], cwd=self.srcdir)
    lightweight = info.startswith('Light')
    update_cmd = (['bzr', 'update'] + self.revspec if lightweight
                  else ['bzr', 'pull'] + self.revspec + [self.module])
    buildscript.execute(update_cmd, cwd=self.srcdir)
def get_default_branch_name(self):
    """Return the remote HEAD branch name, falling back to 'master'."""
    cmd = ['git', 'symbolic-ref', '--short', 'refs/remotes/origin/HEAD']
    try:
        ref = get_output(cmd,
                         cwd=self.get_checkoutdir(),
                         extra_env=get_git_extra_env())
    except CommandError:
        return 'master'
    return ref.strip().replace('origin/', '')
def get_subdirs(url):
    """Return all directory entries listed recursively under a SVN url."""
    # print as a function call (works on both Python 2 and 3)
    print(_("Getting SVN subdirs: this operation might be long..."))
    output = get_output(['svn', 'ls', '-R', url],
                        extra_env=get_svn_extra_env())
    ret = []
    for line in output.splitlines():
        # directories end with a slash; endswith() is also safe on empty
        # lines, unlike the original `line[-1]` which raised IndexError
        if not line.endswith('/'):
            continue
        ret.append(line)
    return ret
def get_info(filename):
    """Parse `svn info` output into a dict keyed by lower-cased field name."""
    output = get_output(['svn', 'info', filename],
                        extra_env=get_svn_extra_env())
    info = {}
    for line in output.splitlines():
        if ':' not in line:
            continue
        field, value = line.split(':', 1)
        info[field.lower().strip()] = value.strip()
    return info
def get_externals(url):
    """Return the svn:externals property of *url* as a {name: url} dict."""
    output = get_output(['svn', 'propget', 'svn:externals', url],
                        extra_env=get_svn_extra_env())
    ret = {}
    for line in output.splitlines():
        if ' ' not in line:
            continue
        # split only on the first space: lines padded with several
        # spaces made the original bare split(' ') raise ValueError
        # on tuple unpacking
        key, value = line.split(' ', 1)
        ret[key.strip()] = value
    return ret
def tree_id(self):
    """Return a '<version>-<md5>' id covering patches and quilt state."""
    digest = hashlib.md5()
    for patch in (self.patches or []):
        digest.update(patch[0])
    if self.quilt:
        digest.update(get_output('quilt files', cwd=self.srcdir,
                                 extra_env={'QUILT_PATCHES': self.quilt.srcdir}))
    return '%s-%s' % (self.version, digest.hexdigest())
def tree_id(self):
    """Return bzr revision-info for the working tree, or None."""
    if not os.path.exists(self.srcdir):
        return None
    try:
        # --tree is new in bzr 1.17
        return get_output(['bzr', 'revision-info', '--tree'],
                          cwd=self.srcdir).strip()
    except CommandError:
        return None
def exists(self):
    """Return True if the remote git repository answers `ls-remote`."""
    try:
        refs = get_output(['git', 'ls-remote', self.module],
                          extra_env=get_git_extra_env())
    except CommandError:
        # narrowed from a bare except (matches the sibling exists()
        # implementation): only a failing git command means "absent"
        return False
    #FIXME: Parse output from ls-remote to work out if tag/branch is present
    return True
def _update(self, buildscript, copydir=None):
    """Rebase a git-svn checkout, stashing/restoring uncommitted changes."""
    if self.config.sticky_date:
        raise FatalError(_('date based checkout not yet supported\n'))
    if self.config.quiet_mode:
        quiet = ['-q']
    else:
        quiet = []
    cwd = self.get_checkoutdir()
    git_extra_args = {'cwd': cwd, 'extra_env': get_git_extra_env()}
    last_revision = get_output(['git', 'svn', 'find-rev', 'HEAD'],
                               **git_extra_args)
    stashed = False
    if get_output(['git', 'diff'], **git_extra_args):
        # stash uncommitted changes on the current branch
        stashed = True
        buildscript.execute(['git', 'stash', 'save', 'jhbuild-stash'],
                            **git_extra_args)
    buildscript.execute(['git', 'checkout'] + quiet + ['master'],
                        **git_extra_args)
    buildscript.execute(['git', 'svn', 'rebase'], **git_extra_args)
    if stashed:
        buildscript.execute(['git', 'stash', 'pop'], **git_extra_args)
    current_revision = get_output(['git', 'svn', 'find-rev', 'HEAD'],
                                  **git_extra_args)
    if last_revision != current_revision:
        try:
            # is known to fail on some versions
            cmd = "git svn show-ignore >> .git/info/exclude"
            buildscript.execute(cmd, **git_extra_args)
        except (CommandError, EnvironmentError):
            # narrowed from a bare except: best-effort step, but do not
            # swallow KeyboardInterrupt or programming errors
            pass
    # FIXME, git-svn should support externals
    self._get_externals(buildscript, self.branch)
def tree_id(self):
    """Return a version/checksum id reflecting patches and quilt files."""
    checksum = hashlib.md5()
    if self.patches:
        for patch_entry in self.patches:
            checksum.update(patch_entry[0])
    if self.quilt:
        quilt_files = get_output(
            'quilt files', cwd=self.srcdir,
            extra_env={'QUILT_PATCHES': self.quilt.srcdir})
        checksum.update(quilt_files)
    return '%s-%s' % (self.version, checksum.hexdigest())
def get_current_branch(self):
    """Returns either a branchname or None if head is detached"""
    if not self.is_inside_work_tree():
        raise CommandError(_('Unexpected: Checkoutdir is not a git '
                             'repository:' + self.get_checkoutdir()))
    try:
        symref = get_output(['git', 'symbolic-ref', '-q', 'HEAD'],
                            cwd=self.get_checkoutdir(),
                            extra_env=get_git_extra_env())
    except CommandError:
        return None
    return os.path.basename(symref.strip())
def _check_for_conflict(self):
    """Checks 'mtn automate heads' for more than 1 head which would
    mean we have conflicts"""
    head_lines = get_output(['mtn', 'automate', 'heads'],
                            cwd=self.srcdir).splitlines()
    head_count = len(head_lines)
    if head_count > 1:
        raise CommandError(_('branch %(branch)s has %(num)d heads')
                           % {'branch': self.branch, 'num': head_count})
def get_aclocal_path(version):
    """Return aclocal-<version>'s ac-dir plus -I dirs from ACLOCAL_FLAGS."""
    output = get_output(['aclocal-%s' % version, '--print-ac-dir'])
    path = [output[:-1]]  # drop the trailing newline
    flags = os.environ.get('ACLOCAL_FLAGS', '').split()
    index = 0
    while index < len(flags):
        if flags[index] == '-I':
            path.append(flags[index + 1])
            index += 2
        else:
            index += 1
    return path
def get_aclocal_path():
    """Return aclocal's ac-dir plus any -I directories from ACLOCAL_FLAGS."""
    ac_dir_output = get_output(['aclocal', '--print-ac-dir'])
    result = [ac_dir_output[:-1]]  # strip trailing newline
    flags = os.environ.get('ACLOCAL_FLAGS', '').split()
    pos = 0
    while pos < len(flags):
        if flags[pos] == '-I':
            result.append(flags[pos + 1])
            pos += 2
        else:
            pos += 1
    return result
def get_current_branch(self):
    """Returns either a branchname or None if head is detached"""
    if not self.is_inside_work_tree():
        raise CommandError(_('Unexpected: Checkoutdir is not a git '
                             'repository:' + self.get_checkoutdir()))
    try:
        ref = get_output(['git', 'symbolic-ref', '-q', 'HEAD'],
                         cwd=self.get_checkoutdir(),
                         extra_env=get_git_extra_env()).strip()
    except CommandError:
        return None
    # strip refs/heads/ to get the branch name only
    return ref.replace('refs/heads/', '')
def get_current_branch(self):
    """Returns either a branchname or None if head is detached"""
    if not self.is_inside_work_tree():
        raise CommandError(
            _('Unexpected: Checkoutdir is not a git '
              'repository:' + self.get_checkoutdir()))
    try:
        symbolic_ref = get_output(
            ['git', 'symbolic-ref', '-q', 'HEAD'],
            cwd=self.get_checkoutdir(),
            extra_env=get_git_extra_env())
        # drop the refs/heads/ prefix, leaving just the branch name
        return symbolic_ref.strip().replace('refs/heads/', '')
    except CommandError:
        return None
def tree_id(self):
    """Return the HEAD commit id (with a dirty-tree suffix), or None."""
    if not os.path.exists(self.get_checkoutdir()):
        return None
    try:
        head = get_output(['git', 'rev-parse', 'HEAD'],
                          cwd=self.get_checkoutdir(), get_stderr=False,
                          extra_env=get_git_extra_env())
    except (CommandError, GitUnknownBranchNameError):
        return None
    suffix = self.dirty_branch_suffix if self.is_dirty() else ''
    return head.strip() + suffix
def get_info(filename):
    """Parse `svn info` into a dict keyed by lower-cased field names."""
    # we run Subversion in the C locale, because Subversion localises
    # the key names in the output. See bug #334678 for more info.
    c_locale_env = {'LANGUAGE': 'C', 'LC_ALL': 'C', 'LANG': 'C'}
    output = get_output(['svn', 'info', filename], extra_env=c_locale_env)
    info = {}
    for line in output.splitlines():
        if ':' not in line:
            continue
        field, value = line.split(':', 1)
        info[field.lower().strip()] = value.strip()
    return info
def _checkout(self, buildscript, copydir=None):
    """Perform the initial `git svn clone` and seed .git/info/exclude.

    Clones into copydir (or the configured checkoutroot), pinned to the
    last changed SVN revision unless self.revision is set, then appends
    `git svn show-ignore` output to the exclude file. Externals are
    fetched separately at the end.
    """
    from . import svn

    if self.config.sticky_date:
        raise FatalError(_('date based checkout not yet supported\n'))

    cmd = ['git', 'svn', 'clone', self.module]
    if self.checkoutdir:
        cmd.append(self.checkoutdir)

    # FIXME (add self.revision support)
    try:
        last_revision = svn.get_info(self.module)['last changed rev']
        if not self.revision:
            cmd.extend(['-r', last_revision])
    except KeyError:
        raise FatalError(
            _('Cannot get last revision from %s. Check the module location.'
              ) % self.module)

    if copydir:
        buildscript.execute(cmd, cwd=copydir, extra_env=get_git_extra_env())
    else:
        buildscript.execute(cmd, cwd=self.config.checkoutroot,
                            extra_env=get_git_extra_env())

    try:
        # is known to fail on some versions
        cmd = ['git', 'svn', 'show-ignore']
        s = get_output(cmd, cwd=self.get_checkoutdir(copydir),
                       extra_env=get_git_extra_env())
        fd = open(
            os.path.join(self.get_checkoutdir(copydir), '.git/info/exclude'),
            'a')
        fd.write(s)
        fd.close()
        # NOTE(review): show-ignore is run a second time here via
        # buildscript — presumably for logging; confirm this is intended
        buildscript.execute(cmd, cwd=self.get_checkoutdir(copydir),
                            extra_env=get_git_extra_env())
    except (CommandError, EnvironmentError):
        pass

    # FIXME, git-svn should support externals
    self._get_externals(buildscript, self.branch)
def _update(self, buildscript, copydir=None):
    """Re-run checkout, preserving uncommitted changes via git stash."""
    if self.config.sticky_date:
        raise FatalError(_('date based checkout not yet supported\n'))
    git_extra_args = {'cwd': self.get_checkoutdir(),
                      'extra_env': get_git_extra_env()}
    # stash uncommitted changes on the current branch
    have_local_changes = bool(get_output(['git', 'diff'], **git_extra_args))
    if have_local_changes:
        buildscript.execute(['git', 'stash', 'save', 'jhbuild-stash'],
                            **git_extra_args)
    self._checkout(buildscript, copydir=copydir)
    if have_local_changes:
        buildscript.execute(['git', 'stash', 'pop'], **git_extra_args)
def _checkout(self, buildscript, copydir=None):
    """Initial `git svn clone` of the module, seeding the ignore list."""
    if self.config.sticky_date:
        raise FatalError(_('date based checkout not yet supported\n'))

    cmd = ['git', 'svn', 'clone', self.module]
    if self.checkoutdir:
        cmd.append(self.checkoutdir)

    # FIXME (add self.revision support)
    try:
        last_revision = jhbuild.versioncontrol.svn.get_info(self.module)['last changed rev']
        if not self.revision:
            cmd.extend(['-r', last_revision])
    except KeyError:
        raise FatalError(_('Cannot get last revision from %s. Check the module location.') % self.module)

    if copydir:
        buildscript.execute(cmd, cwd=copydir, extra_env=get_git_extra_env())
    else:
        buildscript.execute(cmd, cwd=self.config.checkoutroot,
                            extra_env=get_git_extra_env())

    try:
        # is known to fail on some versions
        cmd = ['git', 'svn', 'show-ignore']
        s = get_output(cmd, cwd=self.get_checkoutdir(copydir),
                       extra_env=get_git_extra_env())
        # open() replaces the Python-2-only file() builtin, and the
        # context manager guarantees the handle is closed
        with open(os.path.join(self.get_checkoutdir(copydir),
                               '.git/info/exclude'), 'a') as fd:
            fd.write(s)
        buildscript.execute(cmd, cwd=self.get_checkoutdir(copydir),
                            extra_env=get_git_extra_env())
    except (CommandError, EnvironmentError):
        # narrowed from a bare except (matches the sibling _checkout
        # implementation): best-effort step only
        pass

    # FIXME, git-svn should support externals
    self._get_externals(buildscript, self.branch)
def warn_local_modulesets(config):
    """Warn when locally edited modulesets would be ignored (network fetch)."""
    if config.use_local_modulesets:
        return
    moduleset_local_path = os.path.join(SRCDIR, 'modulesets')
    if not os.path.exists(moduleset_local_path):
        # moduleset-less checkout
        return
    if not os.path.exists(os.path.join(moduleset_local_path, '..', '.git')):
        # checkout was not done via git
        return

    if isinstance(config.moduleset, list):
        modulesets = config.moduleset
    else:
        modulesets = [config.moduleset]
    if all('/' in ms for ms in modulesets):
        # all modulesets have a slash; they are URI
        return

    try:
        git_diff = get_output(['git', 'diff', 'origin/master', '--', '.'],
                              cwd=moduleset_local_path).strip()
    except CommandError:
        # git error, ignore
        return
    if not git_diff:
        # no locally modified moduleset
        return

    logging.info(
        _('Modulesets were edited locally but JHBuild is configured '\
          'to get them from the network, perhaps you need to add '\
          'use_local_modulesets = True to your %s.' % config.filename))
def warn_local_modulesets(config):
    """Warn when locally edited modulesets would be ignored (network fetch)."""
    if config.use_local_modulesets:
        return
    moduleset_local_path = os.path.join(SRCDIR, 'modulesets')
    if not os.path.exists(moduleset_local_path):
        # moduleset-less checkout
        return
    if not os.path.exists(os.path.join(moduleset_local_path, '..', '.git')):
        # checkout was not done via git
        return

    # isinstance instead of type(...) == type([]) — idiomatic, and
    # consistent with the sibling implementation of this function
    if isinstance(config.moduleset, list):
        modulesets = config.moduleset
    else:
        modulesets = [config.moduleset]
    if not [x for x in modulesets if x.find('/') == -1]:
        # all modulesets have a slash; they are URI
        return

    try:
        git_diff = get_output(['git', 'diff', 'origin/master', '--', '.'],
                              cwd=moduleset_local_path).strip()
    except CommandError:
        # git error, ignore
        return
    if not git_diff:
        # no locally modified moduleset
        return

    logging.info(
        _('Modulesets were edited locally but JHBuild is configured '\
          'to get them from the network, perhaps you need to add '\
          'use_local_modulesets = True to your %s.' % config.filename))
def create_mirror(self, buildscript):
    """Create or validate the shared local bzr mirror for this module."""
    if not self.config.dvcs_mirror_dir:
        return
    if self.config.nonetwork:
        return
    if not os.path.exists(os.path.join(self.config.dvcs_mirror_dir, '.bzr')):
        cmd = ['bzr', 'init-repo', '--no-trees', self.config.dvcs_mirror_dir]
        buildscript.execute(cmd)
    local_mirror = os.path.join(self.config.dvcs_mirror_dir, self.checkoutdir)
    if not os.path.exists(local_mirror):
        cmd = ['bzr', 'init', '--create-prefix', local_mirror]
        buildscript.execute(cmd)
    if os.path.exists(self.srcdir):
        cmd = ['bzr', 'info', self.srcdir]
        cwd = self.config.dvcs_mirror_dir
        try:
            info = get_output(cmd, cwd=cwd)
            if info.find('checkout of branch: %s' % self.checkoutdir) == -1:
                # sentinel: srcdir is not a checkout of our mirror
                raise NameError
        except (CommandError, NameError):
            # narrowed from a bare except so unrelated exceptions
            # (KeyboardInterrupt, programming errors) still propagate
            raise FatalError(
                _("""
Path %s does not seem to be a checkout from dvcs_mirror_dir.
Remove it or change your dvcs_mirror_dir settings.""") % self.srcdir)
    else:
        cmd = ['bzr', 'co', '--light', local_mirror, self.srcdir]
        buildscript.execute(cmd)
def check_version(cmd, regexp, minver):
    """Run *cmd* and compare the version it reports against *minver*.

    Returns True when the first regexp group of the command output parses
    to a version >= minver; False when the command fails or the output
    does not match.
    """
    try:
        data = get_output(cmd)
    except Exception:
        # narrowed from a bare except: a missing or failing tool means
        # "requirement not met", but SystemExit/KeyboardInterrupt still
        # propagate
        return False
    match = re.match(regexp, data, re.MULTILINE)
    if not match:
        return False
    version = match.group(1)

    version = version.split('.')
    for i, ver in enumerate(version):
        part = re.sub(r'^[^\d]*(\d+).*$', r'\1', ver)
        if not part:
            version[i] = None
        else:
            version[i] = int(part)
    minver = minver.split('.')
    for i, ver in enumerate(minver):
        part = re.sub(r'^[^\d]*(\d+).*$', r'\1', ver)
        if not part:
            minver[i] = None
        else:
            minver[i] = int(part)
    return version >= minver
class cmd_sanitycheck(Command):
    # Verifies that the build environment has all required support tools;
    # every failed check prints a diagnostic via uprint() rather than
    # aborting, so all problems are reported in one pass.
    doc = N_('Check that required support tools are available')

    name = 'sanitycheck'
    usage_args = ''

    def run(self, config, options, args, help=None):
        """Run all environment sanity checks, printing one line per problem."""
        if args:
            raise UsageError(_('no extra arguments expected'))

        # check whether the checkout root and install prefix are writable
        if not (os.path.isdir(config.checkoutroot) and
                os.access(config.checkoutroot, os.R_OK | os.W_OK | os.X_OK)):
            uprint(
                _('checkout root (%s) is not writable') % config.checkoutroot)
        if not (os.path.isdir(config.prefix) and
                os.access(config.prefix, os.R_OK | os.W_OK | os.X_OK)):
            uprint(_('install prefix (%s) is not writable') % config.prefix)

        # check whether various tools are installed (minimum versions)
        if not check_version(['libtoolize', '--version'],
                             r'libtoolize \([^)]*\) ([\d.]+)', '1.5'):
            uprint(_('%s not found') % 'libtool >= 1.5')
        if not check_version(['gettext', '--version'],
                             r'gettext \([^)]*\) ([\d.]+)', '0.10.40'):
            uprint(_('%s not found') % 'gettext >= 0.10.40')
        if not check_version(['pkg-config', '--version'],
                             r'^([\d.]+)', '0.14.0'):
            uprint(_('%s not found') % 'pkg-config >= 0.14.0')
        if not check_version(['autoconf', '--version'],
                             r'autoconf \([^)]*\) ([\d.]+)', '2.53'):
            uprint(_('%s not found') % 'autoconf >= 2.53')
        if not check_version(['automake', '--version'],
                             r'automake \([^)]*\) ([\d.]+)', '1.10'):
            uprint(_('%s not found') % 'automake >= 1.10')

        # verify aclocal can see the macros modules will need
        try:
            not_in_path = []
            path = get_aclocal_path()

            macros = ['libtool.m4', 'gettext.m4', 'pkg.m4']
            for macro in macros:
                if not inpath(macro, path):
                    uprint(
                        _("aclocal can't see %s macros") % (macro.split('.m4')[0]))
                    if not_in_path.count(macro) == 0:
                        not_in_path.append(macro)

            if len(not_in_path) > 0:
                uprint(_("Please copy the lacking macros (%(macros)s) in one of the following paths: %(path)s") % \
                    {'macros': ', '.join(not_in_path), 'path': ', '.join(path)})

        except CommandError, exc:
            uprint(str(exc))

        # XML catalog sanity checks
        if not os.access('/etc/xml/catalog', os.R_OK):
            uprint(_('Could not find XML catalog'))
        else:
            for (item, name) in [('-//OASIS//DTD DocBook XML V4.1.2//EN',
                                  'DocBook XML DTD V4.1.2'),
                                 ('http://docbook.sourceforge.net/release/xsl/current/html/chunk.xsl',
                                  'DocBook XSL Stylesheets')]:
                try:
                    data = get_output(['xmlcatalog', '/etc/xml/catalog', item])
                except:
                    uprint(_('Could not find %s in XML catalog') % name)

        # Perl modules used by tools such as intltool:
        for perlmod in ['XML::Parser']:
            try:
                get_output(['perl', '-M%s' % perlmod, '-e', 'exit'])
            except:
                uprint(_('Could not find the perl module %s') % perlmod)

        # check for cvs:
        if not inpath('cvs', os.environ['PATH'].split(os.pathsep)):
            uprint(_('%s not found') % 'cvs')

        # check for svn:
        if not inpath('svn', os.environ['PATH'].split(os.pathsep)):
            uprint(_('%s not found') % 'svn')

        # a download tool (either one) is required
        if not (inpath('curl', os.environ['PATH'].split(os.pathsep)) or
                inpath('wget', os.environ['PATH'].split(os.pathsep))):
            uprint(_('curl or wget not found'))

        # check for git:
        if not inpath('git', os.environ['PATH'].split(os.pathsep)):
            uprint(_('%s not found') % 'git')
        else:
            try:
                # distinguish GNU Interactive Tools' "git" from git-scm
                git_help = os.popen('git --help', 'r').read()
                if not 'clone' in git_help:
                    uprint(_('Installed git program is not the right git'))
                else:
                    if not check_version(['git', '--version'],
                                         r'git version ([\d.]+)', '1.5.6'):
                        uprint(_('%s not found') % 'git >= 1.5.6')
            except:
                uprint(_('Could not check git program'))

        # check for flex/bison:
        if not inpath('flex', os.environ['PATH'].split(os.pathsep)):
            uprint(_('%s not found') % 'flex')
        if not inpath('bison', os.environ['PATH'].split(os.pathsep)):
            uprint(_('%s not found') % 'bison')
        if not inpath('xzcat', os.environ['PATH'].split(os.pathsep)):
            uprint(_('%s not found') % 'xzcat')
def get_aclocal_path():
    """Return ACLOCAL_PATH entries followed by aclocal's built-in ac-dir."""
    # drop empty paths, including the case where ACLOCAL_PATH is unset
    result = [entry
              for entry in os.environ.get('ACLOCAL_PATH', '').split(':')
              if entry]
    ac_dir_output = get_output(['aclocal', '--print-ac-dir'])
    result.append(ac_dir_output[:-1])  # strip the trailing newline
    return result
def setup_env(self):
    '''set environment variables for using prefix'''

    if not os.path.exists(self.prefix):
        try:
            os.makedirs(self.prefix)
        except:
            # NOTE(review): bare except — also catches KeyboardInterrupt;
            # consider narrowing to OSError
            raise FatalError(
                _('install prefix (%s) can not be created') % self.prefix)

    if not os.path.exists(self.top_builddir):
        try:
            os.makedirs(self.top_builddir)
        except OSError:
            raise FatalError(
                _('working directory (%s) can not be created') % self.top_builddir)

    os.environ['JHBUILD_PREFIX'] = self.prefix
    # preserve the caller's library path before jhbuild mangles it
    os.environ['UNMANGLED_LD_LIBRARY_PATH'] = os.environ.get(
        'LD_LIBRARY_PATH', '')

    if not os.environ.get('DBUS_SYSTEM_BUS_ADDRESS'):
        # Use the distribution's D-Bus for the system bus. JHBuild's D-Bus
        # will be used for the session bus
        os.environ[
            'DBUS_SYSTEM_BUS_ADDRESS'] = 'unix:path=/var/run/dbus/system_bus_socket'

    # LD_LIBRARY_PATH
    if self.use_lib64:
        libdir = os.path.join(self.prefix, 'lib64')
    else:
        libdir = os.path.join(self.prefix, 'lib')
    self.libdir = libdir
    addpath('LD_LIBRARY_PATH', libdir)
    os.environ['JHBUILD_LIBDIR'] = libdir

    # LDFLAGS and C_INCLUDE_PATH are required for autoconf configure
    # scripts to find modules that do not use pkg-config (such as guile
    # looking for gmp, or wireless-tools for NetworkManager)
    # (see bug #377724 and bug #545018)

    # This path doesn't always get passed to addpath so we fix it here
    if sys.platform.startswith('win'):
        libdir = jhbuild.utils.subprocess_win32.fix_path_for_msys(libdir)
    os.environ['LDFLAGS'] = ('-L%s ' % libdir) + os.environ.get(
        'LDFLAGS', '')

    includedir = os.path.join(self.prefix, 'include')
    addpath('C_INCLUDE_PATH', includedir)
    addpath('CPLUS_INCLUDE_PATH', includedir)

    # On Mac OS X, we use DYLD_FALLBACK_LIBRARY_PATH
    addpath('DYLD_FALLBACK_LIBRARY_PATH', libdir)

    # PATH
    bindir = os.path.join(self.prefix, 'bin')
    addpath('PATH', bindir)

    # MANPATH
    manpathdir = os.path.join(self.prefix, 'share', 'man')
    addpath('MANPATH', '')
    addpath('MANPATH', manpathdir)

    # INFOPATH
    infopathdir = os.path.join(self.prefix, 'share', 'info')
    addpath('INFOPATH', infopathdir)

    # PKG_CONFIG_PATH
    if os.environ.get('PKG_CONFIG_PATH') is None and self.partial_build:
        # add system pkgconfig dirs only when the user has not set their own
        for dirname in ('share', 'lib', 'lib64'):
            full_name = '/usr/%s/pkgconfig' % dirname
            if os.path.exists(full_name):
                addpath('PKG_CONFIG_PATH', full_name)
    pkgconfigdatadir = os.path.join(self.prefix, 'share', 'pkgconfig')
    pkgconfigdir = os.path.join(libdir, 'pkgconfig')
    addpath('PKG_CONFIG_PATH', pkgconfigdatadir)
    addpath('PKG_CONFIG_PATH', pkgconfigdir)

    # GI_TYPELIB_PATH
    if not 'GI_TYPELIB_PATH' in os.environ:
        if self.use_lib64:
            full_name = '/usr/lib64/girepository-1.0'
        else:
            full_name = '/usr/lib/girepository-1.0'
        if os.path.exists(full_name):
            addpath('GI_TYPELIB_PATH', full_name)
    typelibpath = os.path.join(self.libdir, 'girepository-1.0')
    addpath('GI_TYPELIB_PATH', typelibpath)

    # XDG_DATA_DIRS
    if self.partial_build:
        addpath('XDG_DATA_DIRS', '/usr/share')
    xdgdatadir = os.path.join(self.prefix, 'share')
    addpath('XDG_DATA_DIRS', xdgdatadir)

    # XDG_CONFIG_DIRS
    if self.partial_build:
        addpath('XDG_CONFIG_DIRS', '/etc')
    xdgconfigdir = os.path.join(self.prefix, 'etc', 'xdg')
    addpath('XDG_CONFIG_DIRS', xdgconfigdir)

    # XCURSOR_PATH
    xcursordir = os.path.join(self.prefix, 'share', 'icons')
    addpath('XCURSOR_PATH', xcursordir)

    # GST_PLUGIN_PATH
    gstplugindir = os.path.join(self.libdir, 'gstreamer-0.10')
    if os.path.exists(gstplugindir):
        addpath('GST_PLUGIN_PATH', gstplugindir)

    # GST_PLUGIN_PATH_1_0
    gstplugindir = os.path.join(self.libdir, 'gstreamer-1.0')
    if os.path.exists(gstplugindir):
        addpath('GST_PLUGIN_PATH_1_0', gstplugindir)

    # GST_REGISTRY
    gstregistry = os.path.join(self.prefix, '_jhbuild',
                               'gstreamer-0.10.registry')
    addpath('GST_REGISTRY', gstregistry)

    # GST_REGISTRY_1_0
    gstregistry = os.path.join(self.prefix, '_jhbuild',
                               'gstreamer-1.0.registry')
    addpath('GST_REGISTRY_1_0', gstregistry)

    # ACLOCAL_PATH
    aclocalpath = os.path.join(self.prefix, 'share', 'aclocal')
    addpath('ACLOCAL_PATH', aclocalpath)

    # ACLOCAL_FLAGS
    aclocaldir = os.path.join(self.prefix, 'share', 'aclocal')
    if not os.path.exists(aclocaldir):
        try:
            os.makedirs(aclocaldir)
        except:
            # NOTE(review): bare except — consider narrowing to OSError
            raise FatalError(_("Can't create %s directory") % aclocaldir)
    if self.partial_build:
        if os.path.exists('/usr/share/aclocal'):
            addpath('ACLOCAL_FLAGS', '/usr/share/aclocal')
        if os.path.exists('/usr/local/share/aclocal'):
            addpath('ACLOCAL_FLAGS', '/usr/local/share/aclocal')
    addpath('ACLOCAL_FLAGS', aclocaldir)

    # PERL5LIB
    perl5lib = os.path.join(self.prefix, 'lib', 'perl5')
    addpath('PERL5LIB', perl5lib)

    # These two variables are so that people who use "jhbuild shell"
    # can tweak their shell prompts and such to show "I'm under jhbuild".
    # The first variable is the obvious one to look for; the second
    # one is for historical reasons.
    os.environ['UNDER_JHBUILD'] = 'true'
    os.environ['CERTIFIED_GNOMIE'] = 'yes'

    # PYTHONPATH
    # Python inside jhbuild may be different than Python executing jhbuild,
    # so it is executed to get its version number (fallback to local
    # version number should never happen)
    python_bin = os.environ.get('PYTHON', 'python')
    try:
        pythonversion = 'python' + get_output([
            python_bin, '-c',
            'import sys; print(".".join([str(x) for x in sys.version_info[:2]]))'
        ], get_stderr=False).strip()
    except CommandError:
        pythonversion = 'python' + str(sys.version_info[0]) + '.' + str(
            sys.version_info[1])
        if 'PYTHON' in os.environ:
            logging.warn(
                _('Unable to determine python version using the '
                  'PYTHON environment variable (%s). Using default "%s"') %
                (os.environ['PYTHON'], pythonversion))

    # In Python 2.6, site-packages got replaced by dist-packages, get the
    # actual value by asking distutils
    # <http://bugzilla.gnome.org/show_bug.cgi?id=575426>
    try:
        python_packages_dir = get_output([python_bin, '-c',
            'import os, distutils.sysconfig; '\
            'print(distutils.sysconfig.get_python_lib(prefix="%s").split(os.path.sep)[-1])' % self.prefix],
            get_stderr=False).strip()
    except CommandError:
        python_packages_dir = 'site-packages'
        if 'PYTHON' in os.environ:
            logging.warn(
                _('Unable to determine python site-packages directory using the '
                  'PYTHON environment variable (%s). Using default "%s"') %
                (os.environ['PYTHON'], python_packages_dir))

    if self.use_lib64:
        pythonpath = os.path.join(self.prefix, 'lib64', pythonversion,
                                  python_packages_dir)
        addpath('PYTHONPATH', pythonpath)
        if not os.path.exists(pythonpath):
            os.makedirs(pythonpath)

    pythonpath = os.path.join(self.prefix, 'lib', pythonversion,
                              python_packages_dir)
    addpath('PYTHONPATH', pythonpath)
    if not os.path.exists(pythonpath):
        os.makedirs(pythonpath)

    # if there is a Python installed in JHBuild prefix, set it in PYTHON
    # environment variable, so it gets picked up by configure scripts
    # <http://bugzilla.gnome.org/show_bug.cgi?id=560872>
    if os.path.exists(os.path.join(self.prefix, 'bin', 'python')):
        os.environ['PYTHON'] = os.path.join(self.prefix, 'bin', 'python')

    # Mono Prefixes
    os.environ['MONO_PREFIX'] = self.prefix
    os.environ['MONO_GAC_PREFIX'] = self.prefix

    # GConf:
    # Create a GConf source path file that tells GConf to use the data in
    # the jhbuild prefix (in addition to the data in the system prefix),
    # and point to it with GCONF_DEFAULT_SOURCE_PATH so modules will be read
    # the right data (assuming a new enough libgconf).
    gconfdir = os.path.join(self.prefix, 'etc', 'gconf')
    gconfpathdir = os.path.join(gconfdir, '2')
    if not os.path.exists(gconfpathdir):
        os.makedirs(gconfpathdir)
    gconfpath = os.path.join(gconfpathdir, 'path.jhbuild')
    if not os.path.exists(gconfpath) and os.path.exists(
            '/etc/gconf/2/path'):
        try:
            inp = open('/etc/gconf/2/path')
            out = open(gconfpath, 'w')
            for line in inp.readlines():
                # write the jhbuild-prefixed source before the system one
                if '/etc/gconf' in line:
                    out.write(line.replace('/etc/gconf', gconfdir))
                out.write(line)
            out.close()
            inp.close()
        except:
            # NOTE(review): bare except; traceback is printed before the
            # fatal error so the cause is not lost
            traceback.print_exc()
            raise FatalError(
                _('Could not create GConf config (%s)') % gconfpath)
    os.environ['GCONF_DEFAULT_SOURCE_PATH'] = gconfpath

    # Set GCONF_SCHEMA_INSTALL_SOURCE to point into the jhbuild prefix so
    # modules will install their schemas there (rather than failing to
    # install them into /etc).
    os.environ[
        'GCONF_SCHEMA_INSTALL_SOURCE'] = 'xml:merged:' + os.path.join(
            gconfdir, 'gconf.xml.defaults')

    # handle environment prepends ...
    for envvar in env_prepends.keys():
        for path in env_prepends[envvar]:
            addpath(envvar, path)

    # get rid of gdkxft from the env -- it will cause problems.
    if os.environ.has_key('LD_PRELOAD'):
        valarr = os.environ['LD_PRELOAD'].split(' ')
        for x in valarr[:]:
            if x.find('libgdkxft.so') >= 0:
                valarr.remove(x)
        os.environ['LD_PRELOAD'] = ' '.join(valarr)

    self.update_build_targets()
def setup_env(self):
    '''Set up the process environment so builds use the JHBuild prefix.

    Creates the install prefix and working directory if needed, then
    prepends prefix-relative directories to the search-path environment
    variables (PATH, LD_LIBRARY_PATH, PKG_CONFIG_PATH, PYTHONPATH,
    XDG_*, ACLOCAL_*, GStreamer paths, ...), configures GConf to layer
    the prefix configuration over the system one, applies user-supplied
    environment prepends, and finally refreshes the build targets.

    Raises FatalError if a required directory cannot be created.
    '''
    if not os.path.exists(self.prefix):
        try:
            os.makedirs(self.prefix)
        except OSError:
            raise FatalError(_('install prefix (%s) can not be created') % self.prefix)

    if not os.path.exists(self.top_builddir):
        try:
            os.makedirs(self.top_builddir)
        except OSError:
            raise FatalError(
                _('working directory (%s) can not be created') % self.top_builddir)

    os.environ['JHBUILD_PREFIX'] = self.prefix
    # preserve the caller's original LD_LIBRARY_PATH before we extend it
    os.environ['UNMANGLED_LD_LIBRARY_PATH'] = os.environ.get('LD_LIBRARY_PATH', '')

    if not os.environ.get('DBUS_SYSTEM_BUS_ADDRESS'):
        # Use the distribution's D-Bus for the system bus.  JHBuild's
        # D-Bus will be used for the session bus.
        os.environ['DBUS_SYSTEM_BUS_ADDRESS'] = 'unix:path=/var/run/dbus/system_bus_socket'

    # LD_LIBRARY_PATH
    if self.use_lib64:
        libdir = os.path.join(self.prefix, 'lib64')
    else:
        libdir = os.path.join(self.prefix, 'lib')
    self.libdir = libdir
    addpath('LD_LIBRARY_PATH', libdir)

    # LDFLAGS and C_INCLUDE_PATH are required for autoconf configure
    # scripts to find modules that do not use pkg-config (such as guile
    # looking for gmp, or wireless-tools for NetworkManager)
    # (see bug #377724 and bug #545018)

    # This path doesn't always get passed to addpath so we fix it here
    if sys.platform.startswith('win'):
        libdir = jhbuild.utils.subprocess_win32.fix_path_for_msys(libdir)
    os.environ['LDFLAGS'] = ('-L%s ' % libdir) + os.environ.get('LDFLAGS', '')

    includedir = os.path.join(self.prefix, 'include')
    addpath('C_INCLUDE_PATH', includedir)
    addpath('CPLUS_INCLUDE_PATH', includedir)

    # On Mac OS X, we use DYLD_FALLBACK_LIBRARY_PATH
    addpath('DYLD_FALLBACK_LIBRARY_PATH', libdir)

    # PATH
    bindir = os.path.join(self.prefix, 'bin')
    addpath('PATH', bindir)

    # MANPATH
    # NOTE(review): the empty component presumably keeps the system
    # default man path active -- confirm against man(1) behaviour.
    manpathdir = os.path.join(self.prefix, 'share', 'man')
    addpath('MANPATH', '')
    addpath('MANPATH', manpathdir)

    # INFOPATH
    infopathdir = os.path.join(self.prefix, 'share', 'info')
    addpath('INFOPATH', infopathdir)

    # PKG_CONFIG_PATH
    if os.environ.get('PKG_CONFIG_PATH') is None and self.partial_build:
        # add system pkg-config dirs so modules already installed by the
        # distribution are still found
        for dirname in ('share', 'lib', 'lib64'):
            full_name = '/usr/%s/pkgconfig' % dirname
            if os.path.exists(full_name):
                addpath('PKG_CONFIG_PATH', full_name)
    pkgconfigdatadir = os.path.join(self.prefix, 'share', 'pkgconfig')
    pkgconfigdir = os.path.join(libdir, 'pkgconfig')
    addpath('PKG_CONFIG_PATH', pkgconfigdatadir)
    addpath('PKG_CONFIG_PATH', pkgconfigdir)

    # GI_TYPELIB_PATH
    if 'GI_TYPELIB_PATH' not in os.environ:
        if self.use_lib64:
            full_name = '/usr/lib64/girepository-1.0'
        else:
            full_name = '/usr/lib/girepository-1.0'
        if os.path.exists(full_name):
            addpath('GI_TYPELIB_PATH', full_name)
    typelibpath = os.path.join(self.libdir, 'girepository-1.0')
    addpath('GI_TYPELIB_PATH', typelibpath)

    # XDG_DATA_DIRS
    if self.partial_build:
        addpath('XDG_DATA_DIRS', '/usr/share')
    xdgdatadir = os.path.join(self.prefix, 'share')
    addpath('XDG_DATA_DIRS', xdgdatadir)

    # XDG_CONFIG_DIRS
    if self.partial_build:
        addpath('XDG_CONFIG_DIRS', '/etc')
    xdgconfigdir = os.path.join(self.prefix, 'etc', 'xdg')
    addpath('XDG_CONFIG_DIRS', xdgconfigdir)

    # XCURSOR_PATH
    xcursordir = os.path.join(self.prefix, 'share', 'icons')
    addpath('XCURSOR_PATH', xcursordir)

    # GST_PLUGIN_PATH
    gstplugindir = os.path.join(self.libdir, 'gstreamer-0.10')
    if os.path.exists(gstplugindir):
        addpath('GST_PLUGIN_PATH', gstplugindir)

    # GST_PLUGIN_PATH_1_0
    gstplugindir = os.path.join(self.libdir, 'gstreamer-1.0')
    if os.path.exists(gstplugindir):
        addpath('GST_PLUGIN_PATH_1_0', gstplugindir)

    # GST_REGISTRY
    gstregistry = os.path.join(self.prefix, '_jhbuild', 'gstreamer-0.10.registry')
    addpath('GST_REGISTRY', gstregistry)

    # GST_REGISTRY_1_0
    gstregistry = os.path.join(self.prefix, '_jhbuild', 'gstreamer-1.0.registry')
    addpath('GST_REGISTRY_1_0', gstregistry)

    # ACLOCAL_PATH
    aclocalpath = os.path.join(self.prefix, 'share', 'aclocal')
    addpath('ACLOCAL_PATH', aclocalpath)

    # ACLOCAL_FLAGS
    aclocaldir = os.path.join(self.prefix, 'share', 'aclocal')
    if not os.path.exists(aclocaldir):
        try:
            os.makedirs(aclocaldir)
        except OSError:
            raise FatalError(_("Can't create %s directory") % aclocaldir)
    if self.partial_build:
        if os.path.exists('/usr/share/aclocal'):
            addpath('ACLOCAL_FLAGS', '/usr/share/aclocal')
        if os.path.exists('/usr/local/share/aclocal'):
            addpath('ACLOCAL_FLAGS', '/usr/local/share/aclocal')
    addpath('ACLOCAL_FLAGS', aclocaldir)

    # PERL5LIB
    perl5lib = os.path.join(self.prefix, 'lib', 'perl5')
    addpath('PERL5LIB', perl5lib)

    # These two variables are so that people who use "jhbuild shell"
    # can tweak their shell prompts and such to show "I'm under jhbuild".
    # The first variable is the obvious one to look for; the second
    # one is for historical reasons.
    os.environ['UNDER_JHBUILD'] = 'true'
    os.environ['CERTIFIED_GNOMIE'] = 'yes'

    # PYTHONPATH
    # Python inside jhbuild may be different than Python executing jhbuild,
    # so it is executed to get its version number (fallback to local
    # version number should never happen)
    python_bin = os.environ.get('PYTHON', 'python')
    try:
        pythonversion = 'python' + get_output([python_bin, '-c',
            'import sys; print(".".join([str(x) for x in sys.version_info[:2]]))'],
            get_stderr=False).strip()
    except CommandError:
        pythonversion = 'python' + str(sys.version_info[0]) + '.' + str(sys.version_info[1])
        if 'PYTHON' in os.environ:
            logging.warn(_('Unable to determine python version using the '
                           'PYTHON environment variable (%s). Using default "%s"')
                         % (os.environ['PYTHON'], pythonversion))

    # In Python 2.6, site-packages got replaced by dist-packages, get the
    # actual value by asking distutils
    # <http://bugzilla.gnome.org/show_bug.cgi?id=575426>
    try:
        python_packages_dir = get_output([python_bin, '-c',
            'import os, distutils.sysconfig; '
            'print(distutils.sysconfig.get_python_lib(prefix="%s").split(os.path.sep)[-1])'
            % self.prefix],
            get_stderr=False).strip()
    except CommandError:
        python_packages_dir = 'site-packages'
        if 'PYTHON' in os.environ:
            logging.warn(_('Unable to determine python site-packages directory using the '
                           'PYTHON environment variable (%s). Using default "%s"')
                         % (os.environ['PYTHON'], python_packages_dir))

    if self.use_lib64:
        pythonpath = os.path.join(self.prefix, 'lib64', pythonversion, python_packages_dir)
        addpath('PYTHONPATH', pythonpath)
        if not os.path.exists(pythonpath):
            os.makedirs(pythonpath)
    pythonpath = os.path.join(self.prefix, 'lib', pythonversion, python_packages_dir)
    addpath('PYTHONPATH', pythonpath)
    if not os.path.exists(pythonpath):
        os.makedirs(pythonpath)

    # if there is a Python installed in JHBuild prefix, set it in PYTHON
    # environment variable, so it gets picked up by configure scripts
    # <http://bugzilla.gnome.org/show_bug.cgi?id=560872>
    if os.path.exists(os.path.join(self.prefix, 'bin', 'python')):
        os.environ['PYTHON'] = os.path.join(self.prefix, 'bin', 'python')

    # Mono Prefixes
    os.environ['MONO_PREFIX'] = self.prefix
    os.environ['MONO_GAC_PREFIX'] = self.prefix

    # GConf:
    # Create a GConf source path file that tells GConf to use the data in
    # the jhbuild prefix (in addition to the data in the system prefix),
    # and point to it with GCONF_DEFAULT_SOURCE_PATH so modules will read
    # the right data (assuming a new enough libgconf).
    gconfdir = os.path.join(self.prefix, 'etc', 'gconf')
    gconfpathdir = os.path.join(gconfdir, '2')
    if not os.path.exists(gconfpathdir):
        os.makedirs(gconfpathdir)
    gconfpath = os.path.join(gconfpathdir, 'path.jhbuild')
    if not os.path.exists(gconfpath) and os.path.exists('/etc/gconf/2/path'):
        try:
            inp = open('/etc/gconf/2/path')
            out = open(gconfpath, 'w')
            for line in inp.readlines():
                if '/etc/gconf' in line:
                    # write a jhbuild-prefix copy of the source line just
                    # before the system one
                    out.write(line.replace('/etc/gconf', gconfdir))
                out.write(line)
            out.close()
            inp.close()
        except Exception:
            traceback.print_exc()
            raise FatalError(_('Could not create GConf config (%s)') % gconfpath)
    os.environ['GCONF_DEFAULT_SOURCE_PATH'] = gconfpath

    # Set GCONF_SCHEMA_INSTALL_SOURCE to point into the jhbuild prefix so
    # modules will install their schemas there (rather than failing to
    # install them into /etc).
    os.environ['GCONF_SCHEMA_INSTALL_SOURCE'] = 'xml:merged:' + os.path.join(
        gconfdir, 'gconf.xml.defaults')

    # handle environment prepends ...
    for envvar, paths in env_prepends.items():
        for path in paths:
            addpath(envvar, path)

    # get rid of gdkxft from the env -- it will cause problems.
    if 'LD_PRELOAD' in os.environ:
        valarr = os.environ['LD_PRELOAD'].split(' ')
        for x in valarr[:]:
            if 'libgdkxft.so' in x:
                valarr.remove(x)
        os.environ['LD_PRELOAD'] = ' '.join(valarr)

    self.update_build_targets()
def run(self, config, options, args, help=None):
    '''Sanity-check the build environment.

    Verifies that the checkout root and install prefix exist and are
    writable, and that the tools, XML catalog entries and Perl modules
    commonly needed to build modules are installed, printing one warning
    line per problem found.

    Raises UsageError if positional arguments are supplied.
    '''
    if args:
        raise UsageError(_('no extra arguments expected'))

    # try creating jhbuild directories before checking they are accessible.
    # each directory gets its own attempt so that a pre-existing checkout
    # root no longer prevents the prefix from being created.
    for dirname in (config.checkoutroot, config.prefix):
        try:
            os.makedirs(dirname)
        except OSError:
            pass

    # check whether the checkout root and install prefix are writable
    if not (os.path.isdir(config.checkoutroot) and
            os.access(config.checkoutroot, os.R_OK | os.W_OK | os.X_OK)):
        uprint(_('checkout root (%s) is not writable') % config.checkoutroot)
    if not (os.path.isdir(config.prefix) and
            os.access(config.prefix, os.R_OK | os.W_OK | os.X_OK)):
        uprint(_('install prefix (%s) is not writable') % config.prefix)

    autoconf = True

    # check whether various tools are installed
    if not check_version(['libtoolize', '--version'],
                         r'libtoolize \([^)]*\) ([\d.]+)', '1.5'):
        uprint(_('%s not found') % 'libtool >= 1.5')
    if not check_version(['gettext', '--version'],
                         r'gettext \([^)]*\) ([\d.]+)', '0.10.40'):
        uprint(_('%s not found') % 'gettext >= 0.10.40')
    if not check_version(['pkg-config', '--version'],
                         r'^([\d.]+)', '0.14.0'):
        uprint(_('%s not found') % 'pkg-config >= 0.14.0')
    if not check_version(['autoconf', '--version'],
                         r'autoconf \([^)]*\) ([\d.]+)', '2.53'):
        autoconf = False
        uprint(_('%s not found') % 'autoconf >= 2.53')
    if not check_version(['automake', '--version'],
                         r'automake \([^)]*\) ([\d.]+)', '1.10'):
        uprint(_('%s not found') % 'automake >= 1.10')
    if autoconf:
        self.check_m4()

    # XML catalog sanity checks
    xmlcatalog = True
    try:
        get_output(['which', 'xmlcatalog'])
    except Exception:
        xmlcatalog = False
        uprint(_('Could not find XML catalog (usually part of the package \'libxml2-utils\')'))

    if xmlcatalog:
        for item, name in [('-//OASIS//DTD DocBook XML V4.1.2//EN',
                            'DocBook XML DTD V4.1.2'),
                           ('http://docbook.sourceforge.net/release/xsl/current/html/chunk.xsl',
                            'DocBook XSL Stylesheets')]:
            try:
                # only the exit status matters: a failing lookup raises
                get_output(['xmlcatalog', '/etc/xml/catalog', item])
            except Exception:
                uprint(_('Could not find %s in XML catalog (usually part of package \'docbook-xsl\')') % name)

    # Perl module used by tools such as intltool:
    perlmod = 'XML::Parser'
    try:
        get_output(['perl', '-M%s' % perlmod, '-e', 'exit'])
    except Exception:
        uprint(_('Could not find the Perl module %s (usually part of package \'libxml-parser-perl\' or \'perl-XML-Parser\')') % perlmod)

    # check for cvs:
    if not inpath('cvs', os.environ['PATH'].split(os.pathsep)):
        uprint(_('%s not found') % 'cvs')
    # check for svn:
    if not inpath('svn', os.environ['PATH'].split(os.pathsep)):
        uprint(_('%s not found (usually part of the package \'subversion\')') % 'svn')
    # a download tool is needed to fetch tarballs
    if not (inpath('curl', os.environ['PATH'].split(os.pathsep)) or
            inpath('wget', os.environ['PATH'].split(os.pathsep))):
        uprint(_('curl or wget not found'))
    # check for git:
    if not inpath('git', os.environ['PATH'].split(os.pathsep)):
        uprint(_('%s not found') % 'git')
    else:
        try:
            git_help = os.popen('git --help', 'r').read()
            # presumably guards against a different program also named
            # 'git' that lacks a 'clone' subcommand -- TODO confirm
            if not 'clone' in git_help:
                uprint(_('Installed git program is not the right git'))
            else:
                if not check_version(['git', '--version'],
                                     r'git version ([\d.]+)', '1.5.6'):
                    uprint(_('%s not found') % 'git >= 1.5.6')
        except Exception:
            uprint(_('Could not check git program'))
    # check for flex/bison:
    if not inpath('flex', os.environ['PATH'].split(os.pathsep)):
        uprint(_('%s not found') % 'flex')
    if not inpath('bison', os.environ['PATH'].split(os.pathsep)):
        uprint(_('%s not found') % 'bison')
    if not inpath('xzcat', os.environ['PATH'].split(os.pathsep)):
        uprint(_('%s not found') % 'xzcat')