def get_version(cls):
    """Gets version of program -> version

    Looks for version in the following places (by order):
    1) <INSTALL_PATH>/version.txt (if it exists)
    2) debian/changelog (if it exists - parsed with dpkg-parsechangelog)
    3) `autoversion HEAD`
    """
    version_file = join(cls.INSTALL_PATH, "version.txt")
    if lexists(version_file):
        return file(version_file).readline().strip()

    orig_cwd = os.getcwd()
    if cls.INSTALL_PATH:
        os.chdir(cls.INSTALL_PATH)

    try:
        if not exists("debian/changelog"):
            output = getoutput("autoversion HEAD")
            version = output
        else:
            output = getoutput("dpkg-parsechangelog")
            version = [ line.split(" ")[1]
                        for line in output.split("\n")
                        if line.startswith("Version:") ][0]
    except ExecError:
        os.chdir(orig_cwd)
        return "?"

    os.chdir(orig_cwd)
    return version
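# Hedged usage sketch for get_version() above: it is written as a classmethod, so it is
# assumed to be attached to a class that defines INSTALL_PATH. The class name and path
# below are hypothetical, not taken from the source.
class _ExampleProgram(object):
    INSTALL_PATH = "/usr/lib/example-program"   # hypothetical install path
    get_version = classmethod(get_version)      # reuse the function defined above

# _ExampleProgram.get_version() then returns the version.txt contents, the parsed
# debian/changelog version, the `autoversion HEAD` output, or "?" if the lookup fails.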
def refresh(self):
    self.uri_release.download()
    self.uri_release_gpg.download()

    executil.getoutput("gpgv", "--keyring", self.chanko.trustedkeys,
                       self.uri_release_gpg.path, self.uri_release.path)

    release_content = file(self.uri_release.path).read()
    for uri_index in self.uri_indexes:
        # skip download if latest
        if self._index_in_release(uri_index.path, release_content):
            continue

        # attempt download of Packages.bz2, fallback to Packages.gz
        try:
            uri_index.download()
        except executil.ExecError, e:
            if uri_index.filename == "Packages.bz2" and not e.exitcode == 22:
                raise e

            print "* info: Packages.bz2 not available, falling back to gzip..."
            uri_index.url = uri_index.url.replace("bz2", "gz")
            uri_index.download()

        # verify integrity, delete on failure
        if not self._index_in_release(uri_index.path, release_content):
            os.remove(uri_index.path)
            raise Error("verification failed: ", uri_index.path)
def apply_overlay(src, dst, olist_path):
    orig_cwd = os.getcwd()
    os.chdir(src)
    executil.getoutput("tar --create --files-from=%s | tar --extract --directory %s" %
                       (olist_path, executil.mkarg(dst)))
    os.chdir(orig_cwd)
def __init__(self):
    TempFile.__init__(self, prefix='key_')
    os.remove(self.path)

    executil.getoutput("ssh-keygen -N '' -f %s" % self.path)
    os.remove(self.path + ".pub")

    PrivateKey.__init__(self, self.path)
def _is_alive(self): try: getoutput("/etc/init.d/postgresql", "status") except ExecError, e: if e.exitcode == 3: # ie. stopped return False else: raise Error("Unknown postgresql status exitcode: %s" % e.exitcode)
def _is_signed(fpath, keyring):
    fpath_sig = fpath + ".sig"
    if not exists(fpath_sig):
        return False

    try:
        executil.getoutput("gpg --keyring=%s --verify" % keyring, fpath_sig)
        return True
    except:
        return False
def start(cls):
    if cls.is_running():
        return

    retries = 2
    for i in range(retries):
        try:
            executil.getoutput(cls.INIT_SCRIPT, "start")
            return
        except executil.ExecError, e:
            pass
def get_version():
    try:
        if not exists("debian/changelog"):
            return getoutput("autoversion HEAD")

        output = getoutput("dpkg-parsechangelog")
        version = [ line.split(" ")[1]
                    for line in output.split("\n")
                    if line.startswith("Version:") ][0]
        return version
    except ExecError:
        return None
def _get_public_ipaddr(cls):
    publicip_cmd = conf.Conf().publicip_cmd
    if publicip_cmd:
        try:
            return executil.getoutput(publicip_cmd)
        except executil.ExecError, e:
            pass
def _read_partition_table(path):
    partitions = []
    for line in executil.getoutput("fdisk -l %s" % path).splitlines():
        if line.startswith(path):
            partitions.append(Partition(line))

    return partitions
def usage(self):
    if self.advanced_enabled:
        default_button_label = "Advanced Menu"
        default_return_value = "advanced"
    else:
        default_button_label = "Quit"
        default_return_value = "quit"

    #if no interfaces at all - display error and go to advanced
    if len(self._get_filtered_ifnames()) == 0:
        error = "No network adapters detected"
        if not self.advanced_enabled:
            fatal(error)

        self.console.msgbox("Error", error)
        return "advanced"

    #if interfaces but no default - display error and go to networking
    ifname = self._get_default_nic()
    if not ifname:
        error = "Networking is not yet configured"
        if not self.advanced_enabled:
            fatal(error)

        self.console.msgbox("Error", error)
        return "networking"

    #tklbam integration
    try:
        tklbam_status = executil.getoutput("tklbam-status --short")
    except executil.ExecError, e:
        if e.exitcode in (10, 11): #not initialized, no backups
            tklbam_status = e.output
        else:
            tklbam_status = ''
def termcap_get_lines():
    buf = executil.getoutput("resize")
    m = re.search(r'LINES=(\d+)', buf)
    if not m:
        raise Error("can't parse `resize` output")

    return int(m.group(1))
def refresh(chanko, cache):
    releases = {}
    raw = executil.getoutput("apt-get %s --print-uris update" % cache.options)
    for r in raw.split("\n"):
        if "Translation" in r:
            continue

        # hack to fallback to old release files until properly supported
        if "InRelease" in r:
            for f in ("Release", "Release.gpg"):
                uri = Uri(r.replace("InRelease", f), destpath=cache.cache_lists)
                release = releases.get(uri.release, Release(uri.release, chanko))
                release.add_uri(uri)
                releases[uri.release] = release
            continue

        uri = Uri(r, destpath=cache.cache_lists)
        release = releases.get(uri.release, Release(uri.release, chanko))
        release.add_uri(uri)
        releases[uri.release] = release

    for release in releases.values():
        release.refresh()
def _cmdcache(self, arg, sort=False):
    results = executil.getoutput("apt-cache %s %s" % (self.options, arg))
    if sort:
        results = results.splitlines()
        results.sort()
        results = "\n".join(results)

    return results
def list_databases():
    for line in getoutput(su('psql -l')).splitlines():
        m = re.match(r'^ (\S+?)\s', line)
        if not m:
            continue

        name = m.group(1)
        yield name
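# Quick self-contained check of the pattern list_databases() relies on; the sample row
# below is invented to mimic one line of `psql -l` output, not captured from a server.
import re
_sample_row = " exampledb | postgres | UTF8     | en_US.UTF-8 | en_US.UTF-8 |"
_m = re.match(r'^ (\S+?)\s', _sample_row)
assert _m and _m.group(1) == "exampledb"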
def fmt_welcome():
    welcome = "Welcome to %s" % socket.gethostname()

    kernel = executil.getoutput("uname -mr")
    sysversion = fmt_sysversion()
    if sysversion:
        welcome += ", " + sysversion + " (" + kernel + ")"

    return welcome
def _get_blockdev_path(path):
    p = executil.getoutput("udevadm info -q path -n %s" % path).lstrip('/')
    for path in (join('/sys', p, 'device'),
                 join('/sys/block', basename(p)),
                 join('/dev', basename(p))):
        if not exists(path):
            raise Error('usbdev path error: %s' % path)

    return join('/dev', basename(p))
def get_uris(chanko, cache, packages, nodeps=False):
    try:
        cmd = "apt-get %s --print-uris -y install" % (cache.options)
        raw = executil.getoutput(cmd, *packages)
    except executil.ExecError, e:
        if re.search("Couldn't find package", e[2]):
            print "Couldn't find package '%s'" % e[2].split()[-1]
            return []
        else:
            raise Error("get_uris raised error: ", e)
def _get_physdevpath(devname):
    """ugly hack to get the physical device path of first parent"""
    raw_output = executil.getoutput('udevadm info -a -n %s' % devname)
    for line in raw_output.splitlines():
        line = line.strip()
        if line.startswith("looking at parent device '/devices/xen/vbd-"):
            return line.split()[-1].strip(":").strip("'")

    return None
def from_system(cls):
    try:
        system_version = file("/etc/turnkey_version").readline().strip()
    except:
        try:
            system_version = executil.getoutput("turnkey-version")
        except executil.ExecError:
            return None

    return cls.from_string(system_version)
def main():
    print indent(2, fmt_welcome())

    try:
        sysinfo = executil.getoutput("/usr/lib/sysinfo/sysinfo.py")
        print
        print indent(2, sysinfo)
        print
    except executil.ExecError:
        pass
def _expected_devpath(devname, devpaths):
    """ugly hack to test expected structure of devpath"""
    raw_output = executil.getoutput('udevadm info -a -n %s' % devname)
    for line in raw_output.splitlines():
        line = line.strip()
        for devpath in devpaths:
            if line.startswith("looking at parent device '%s" % devpath):
                return True

    return False
def gateway(self):
    try:
        output = executil.getoutput("route -n")
    except executil.ExecError:
        return None

    for line in output.splitlines():
        m = re.search(r"^0.0.0.0\s+(.*?)\s+(.*)\s+%s" % self.ifname, line, re.M)
        if m:
            return m.group(1)

    return None
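# Illustration of the `route -n` line that gateway() matches; the routing entry and
# interface name below are made up to mirror typical output.
import re
_sample = "0.0.0.0         192.0.2.1       0.0.0.0         UG    0      0        0 eth0"
_m = re.search(r"^0.0.0.0\s+(.*?)\s+(.*)\s+%s" % "eth0", _sample, re.M)
assert _m and _m.group(1) == "192.0.2.1"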
def is_hybrid(self):
    output = executil.getoutput("fdisk", "-l", self.path)
    if "Hidden HPFS/NTFS" in output:
        return True

    if "Disk identifier: 0x00000000" in output:
        return False

    if "doesn't contain a valid partition table" in output:
        return False

    raise Error("unable to determine ISO hybrid status")
def refresh(self):
    self.uri_release.download()
    self.uri_release_gpg.download()

    executil.getoutput("gpgv", "--keyring", self.chanko.trustedkeys,
                       self.uri_release_gpg.path, self.uri_release.path)

    release_content = file(self.uri_release.path).read()
    for uri_index in self.uri_indexes:
        # skip download if latest
        if self._index_in_release(uri_index.path, release_content):
            continue

        # perform download
        uri_index.download()

        # verify integrity, delete on failure
        if not self._index_in_release(uri_index.path, release_content):
            os.remove(uri_index.path)
            raise Error("verification failed: ", uri_index.path)
def restoredb(dbdump, dbname, tlimits=[]):
    manifest = file(join(dbdump, FNAME_MANIFEST)).read().splitlines()

    try:
        getoutput(su("dropdb " + dbname))
    except:
        pass

    orig_cwd = os.getcwd()
    os.chdir(dbdump)

    try:
        command = "tar c %s 2>/dev/null" % " ".join(manifest)
        command += " | pg_restore --create --format=tar"
        for (table, sign) in tlimits:
            if sign:
                command += " --table=" + table
        command += " | " + su("psql")

        system(command)
    finally:
        os.chdir(orig_cwd)
def query(device=None):
    """query udev database and return device(s) information

    if no device is specified, all devices will be returned
    """
    if device:
        cmd = "udevadm info --query all --name %s" % device
    else:
        cmd = "udevadm info --export-db"

    devices = []
    for s in executil.getoutput(cmd).split('\n\n'):
        devices.append(Device(s))

    return devices
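# Hedged usage sketch for query() above: it shells out to udevadm, so it only runs on a
# Linux host with udev; "/dev/sda" is an illustrative device node, and what the returned
# Device objects expose depends on that class (defined elsewhere in this module).
if __name__ == "__main__":
    for device in query("/dev/sda"):    # query() with no argument dumps every device
        print device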
def pgsql2fs(outdir, limits=[], callback=None):
    limits = DBLimits(limits)

    for dbname in list_databases():
        if dbname not in limits or dbname == 'postgres' or re.match(r'template\d', dbname):
            continue

        if callback:
            callback(dbname)

        dumpdb(outdir, dbname, limits[dbname])

    globals = getoutput(su("pg_dumpall --globals"))
    file(join(outdir, FNAME_GLOBALS), "w").write(globals)
def _expected_devpath(devname, devpaths):
    """ugly hack to test expected structure of devpath"""
    raw_output = executil.getoutput('udevadm info -a -n %s' % devname)
    for line in raw_output.splitlines():
        line = line.strip()
        m = re.match("^looking at parent device '(.*)':", line)
        if m:
            devpath = m.group(1)
            for pattern in devpaths:
                if re.search(pattern, devpath):
                    return True

    return False
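# Illustration of the matching _expected_devpath() performs; the udevadm line below is
# fabricated to show the shape of the data, not captured from a real device.
import re
_line = "looking at parent device '/devices/pci0000:00/0000:00:1d.0/usb2':"
_m = re.match("^looking at parent device '(.*)':", _line)
assert _m and re.search(r'/usb\d', _m.group(1))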
def query(device=None, volinfo=True):
    """query udev database and return device(s) information

    if no device is specified, all devices will be returned
    optionally query volume info (vol_id) on disk devices
    """
    if device:
        cmd = "udevadm info --query all --name %s" % device
    else:
        cmd = "udevadm info --export-db"

    devices = []
    for s in getoutput(cmd + " 2>/dev/null").split('\n\n'):
        devices.append(Device(s, volinfo))

    return devices
def fmt_base_distribution():
    try:
        output = executil.getoutput("lsb_release -ircd")
    except executil.ExecError:
        return

    d = dict([line.split(':\t') for line in output.splitlines()])
    codename = d['Codename'].capitalize()

    basedist = "%s %s %s" % (d['Distributor ID'], d['Release'], codename)
    if d['Codename'] in ('hardy', 'lucid', 'precise'):
        basedist += " LTS"

    return basedist
def get_ifnames():
    """ returns list of interface names (up and down) """
    ifnames = []
    try:
        infos = executil.getoutput("/sbin/ifconfig -a | grep \"Link encap\" | awk '{print $1}'")
    except executil.ExecError:
        return None

    for line in infos.splitlines():
        try:
            ifname = line.strip()
            ifnames.append(ifname)
        except ValueError:
            pass

    return ifnames
def _get_volinfo(self):
    if self.env.has_key('DEVTYPE') and self.env['DEVTYPE'] == 'disk':
        try:
            volume_info = getoutput('vol_id /dev/%s' % self.name)
        except ExecError:
            return

        for value in volume_info.splitlines():
            name, val = value.split("=")
            if self.env.has_key(name):
                continue

            if not val:
                continue

            self.env[name] = val
def _get_filtered_ifnames():
    ifnames = []
    for ifname in netinfo.get_ifnames():
        if ifname.startswith(('lo', 'tap', 'br', 'natbr', 'tun', 'vmnet', 'veth', 'wmaster')):
            continue
        ifnames.append(ifname)

    # handle bridged LXC where br0 is the default outward-facing interface
    defifname = conf.Conf().default_nic
    if defifname and defifname.startswith('br'):
        ifnames.append(defifname)
        bridgedif = executil.getoutput('brctl', 'show', defifname).split('\n')[1].split('\t')[-1]
        ifnames.remove(bridgedif)

    ifnames.sort()
    return ifnames
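# Illustration of the `brctl show br0` parsing above; the two-line sample mimics brctl's
# tab-separated output, and the bridge id and interface name are made up.
_sample = ("bridge name\tbridge id\t\tSTP enabled\tinterfaces\n"
           "br0\t\t8000.000000000000\tno\t\teth0")
assert _sample.split('\n')[1].split('\t')[-1] == "eth0"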
def usage(self):
    #if no interfaces at all - display error and go to advanced
    if len(self._get_filtered_ifnames()) == 0:
        self.console.msgbox("Error", "No network adapters detected")
        return "advanced"

    #if interfaces but no default - display error and go to networking
    ifname = self._get_default_nic()
    if not ifname:
        self.console.msgbox("Error", "Networking is not yet configured")
        return "networking"

    #tklbam integration
    try:
        tklbam_status = executil.getoutput("tklbam-status --short")
    except executil.ExecError, e:
        if e.exitcode in (10, 11): #not initialized, no backups
            tklbam_status = e.output
        else:
            tklbam_status = ''
def __init__(self):
    self.base = os.getcwd()
    self.config = os.path.join(self.base, 'config')
    self.architecture = getoutput("dpkg --print-architecture")

    self.trustedkeys = os.path.join(self.config, 'trustedkeys.gpg')
    self.sources_list = os.path.join(self.config, 'sources.list')
    for f in (self.sources_list, self.trustedkeys):
        if not os.path.exists(f):
            raise Error("required file not found: " + f)

    conf = ChankoConfig(os.path.join(self.config, 'chanko.conf'))
    os.environ['CCURL_CACHE'] = conf.ccurl_cache

    self.archives = os.path.join(self.base, 'archives')
    makedirs(os.path.join(self.archives, 'partial'))

    plan_path = os.path.join(self.base, 'plan')
    plan_cpp = conf.plan_cpp.replace("-", " -").strip()
    plan_cpp = plan_cpp.split(" ") if plan_cpp else []
    self.plan = Plan(plan_path, self.architecture, plan_cpp)

    self.local_cache = LocalCache(self)
    self.remote_cache = RemoteCache(self)
def valid_ip(address):
    try:
        socket.inet_aton(address)
        return True
    except:
        return False

ADMIN_USER = "******"
DEFAULT_DOMAIN = "DOMAIN"
DEFAULT_REALM = "domain.lan"

HOSTNAME = getoutput('hostname -s').strip()
NET_IP = getoutput('hostname -I').strip()

NET_IP321 = NET_IP.split('.')[:-1]
NET_IP321.reverse()
NET_IP321 = '.'.join(NET_IP321)
NET_IP4 = NET_IP.split('.')[-1]

def main():
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], "h",
                                       ['help', 'pass=', 'realm=', 'domain='])
    except getopt.GetoptError, e:
        usage(e)
def usage(s=None):
    if s:
        print >> sys.stderr, "Error:", s
    print >> sys.stderr, "Syntax: %s [options]" % sys.argv[0]
    print >> sys.stderr, __doc__
    sys.exit(1)

def main():
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], "h", ['help', 'pass='])
    except getopt.GetoptError, e:
        usage(e)

    password = ""
    for opt, val in opts:
        if opt in ('-h', '--help'):
            usage()
        elif opt == '--pass':
            password = val

    if not password:
        d = Dialog('TurnKey Linux - First boot configuration')
        password = d.get_password(
            "cherryepg Password",
            "Enter new password for the 'cherryepg' user account.")

    getoutput("echo \"%s\"\n\"%s\" | passwd cherryepg" % (password, password))

if __name__ == "__main__":
    main()
def is_running(cls):
    try:
        executil.getoutput('mysqladmin -s ping')
        return True
    except executil.ExecError:
        return False
        return

    initialized_tklbam = False
    d = Dialog('TurnKey GNU/Linux - First boot configuration')

    while 1:
        retcode, apikey = d.inputbox("Initialize Hub services", TEXT_SERVICES,
                                     apikey, "Apply", "Skip")
        if not apikey or retcode == 1:
            break

        d.infobox("Linking TKLBAM to the TurnKey Hub...")

        try:
            getoutput("host -W 2 hub.turnkeylinux.org")
        except ExecError, e:
            d.error(CONNECTIVITY_ERROR)
            break

        try:
            getoutput('tklbam-init %s' % apikey)
            d.msgbox('Success! Linked TKLBAM to Hub', SUCCESS_TKLBAM)
            initialized_tklbam = True
            break
        except ExecError, e:
            d.msgbox('Failure', e.output)
            continue

    if initialized_tklbam: