def stat():
    """Collect per-port traffic counters from iptables and store them.

    Ensures the accounting chain is up to date, reads the packet/byte
    counters, zeroes the chain, aggregates counters per user port, and
    inserts one row per active user into the ``traffic`` table.

    Returns False when iptables cannot be queried for lack of privilege.
    """
    update_iptables()
    listing, exit_code = get_stdout(IPTABLES + ('-nxvL', CHAIN_NAME))
    if exit_code == 3:
        return False  # no privilege to read iptables
    # Reset counters right after reading so the next run starts fresh.
    get_stdout(IPTABLES + ('-Z', CHAIN_NAME))
    # Aggregate matches: port -> [packets, bytes]; both sport and dport
    # rules contribute to the same port's totals.
    totals = {}
    for pkts, nbytes, port_s in re.findall(r"^\s*(\d+)\s+(\d+).+[sd]pt:(\d+)", listing, re.M):
        counters = totals.setdefault(int(port_s), [0, 0])
        counters[0] += int(pkts)
        counters[1] += int(nbytes)
    rows = []
    for u in user.get_all(only_active=True):
        counters = totals.get(u.get_port())
        if counters and counters[0] and counters[1]:
            # Skip records with zero packets or zero bytes.
            rows.append((u.id, counters[0], counters[1]))
    cursor = database.conn.cursor()
    cursor.executemany('INSERT INTO traffic(user,packages,traffic) VALUES (?, ?, ?)', rows)
    cursor.close()
    database.conn.commit()
def stat():
    """Collect per-port traffic from the SSLAND iptables chain.

    Creates the chain when missing (hooked into OUTPUT), reads and zeroes
    its counters, inserts one traffic row per non-suspended user, and
    appends an accounting rule for any user port not yet matched.

    Returns False when iptables reports insufficient privilege.
    """
    listing, exit_code = get_stdout(IPTABLES + ('-nxvL', 'SSLAND'))
    if exit_code == 3:
        return False  # no privilege
    if exit_code == 1:
        # Chain not found yet: create it and jump to it from OUTPUT.
        get_stdout(IPTABLES + ('-N', 'SSLAND'))
        get_stdout(IPTABLES + ('-I', 'OUTPUT', '1', '-j', 'SSLAND'))
    # Zero counters now so the next invocation reads a fresh window.
    get_stdout(IPTABLES + ('-Z', 'SSLAND'))
    # port -> (packets, bytes) taken from destination-port rules.
    per_port = {}
    for pkts, nbytes, port_s in re.findall(r"^\s*(\d+)\s+(\d+).+dpt:(\d+)", listing, re.M):
        per_port[int(port_s)] = (int(pkts), int(nbytes))
    rows = []
    for u in user.get_all():
        if u.suspended:
            continue
        port = config.user_port(u.id)
        if port in per_port:
            pkts, nbytes = per_port[port]
            rows.append((u.id, pkts, nbytes))
        else:
            # No accounting rule for this user's port yet: add one.
            get_stdout(IPTABLES + ('-A', 'SSLAND', '-p', 'tcp', '--dport', str(port)))
    cursor = database.conn.cursor()
    cursor.executemany('INSERT INTO traffic(user,packages,traffic) VALUES (?, ?, ?)', rows)
    cursor.close()
    database.conn.commit()
def run(scope, action, argv):
    """Dispatch a CLI command.

    ``scope``/``action`` select the operation (``user list``, ``user add``,
    ``user del``, ``user suspend``/``unsuspend``, ``user passwd``,
    ``user sskey``, ``sys update``); ``argv`` carries positional
    arguments. Unknown combinations fall back to print_help().

    NOTE(review): this block reached us with several string literals and
    the statements around them censored to ``'******'``. The prompting
    lines marked "reconstructed" below are a best-effort restoration of
    the destroyed code — confirm against the upstream source.
    """
    if scope == 'user':
        if action == 'list':
            print("id\tusername\tsuspended\tport\tsskey")
            for u in user.get_all():
                print('\t'.join((str(item) for item in (
                    u.id,
                    u.username,
                    'True' if u.suspended else 'False',
                    config.user_port(u.id),
                    u.sskey,
                ))))
        elif action == 'add':
            username = argv[0] if len(argv) > 0 else raw_input('Username: ')
            password = raw_input('Password: ')  # reconstructed prompt
            sskey = raw_input('Shadowsocks Key: ')
            u = user.User()
            u.username = username
            u.set_password(password)
            u.sskey = sskey
            u.create()
            u.write()
        elif action == 'del':
            user.delete_users(*argv)
        elif action in ['suspend', 'unsuspend']:
            user.batch_update(*argv, suspended=(1 if action == 'suspend' else 0))
        elif action == 'passwd':
            username = argv[0] if len(argv) > 0 else raw_input('Username: ')
            password = raw_input('Password: ')  # reconstructed
            u = user.get_by_username(username)  # reconstructed
            u.set_password(password)            # reconstructed
            u.write()                           # reconstructed
        elif action == 'sskey':
            username = argv[0] if len(argv) > 0 else raw_input('Username: ')
            sskey = raw_input('Shadowsocks Key: ')
            u = user.get_by_username(username)
            u.sskey = sskey
            u.write()
        else:
            print_help()
    elif scope == 'sys':
        if action == 'update':
            import ssmgr
            ssmgr.update_and_restart()
            # Best-effort: if the web daemon is alive (pid file + signal-0
            # probe), restart it too; any failure here is ignored on purpose.
            try:
                import os
                with open(config.TMP_ROOT + "/ssland.web.pid", 'r') as f:
                    pid = int(f.read())
                os.kill(pid, 0)  # raises if no such process
                from utils import get_stdout
                get_stdout(["./web.py", "-d", "restart"])
            except:
                pass
        else:
            print_help()
    else:
        print_help()
def _call_delnode(self, node):
    "Remove node (how depends on cluster stack)"
    # Returns True on success, False on failure. Sanity-check failures can
    # be overridden via config.core.force.
    # NOTE(review): the source arrived with flattened indentation; the
    # layout below (in particular the level of the second `cmd =`
    # assignment) has been reconstructed — confirm against upstream.
    rc = True
    if utils.cluster_stack() == "heartbeat":
        # Heartbeat stack: use the dedicated hb_delnode helper command.
        cmd = (self.hb_delnode % node)
    else:
        # Pacemaker stack: first verify the node is no longer part of the
        # current partition (crm_node -p lists partition members).
        ec, s = utils.get_stdout("%s -p" % self.crm_node)
        if not s:
            common_err('%s -p could not list any nodes (rc=%d)' % (self.crm_node, ec))
            rc = False
        else:
            partition_l = s.split()
            if node in partition_l:
                common_err("according to %s, node %s is still active" % (self.crm_node, node))
                rc = False
        cmd = "%s --force -R %s" % (self.crm_node, node)
    if not rc:
        if config.core.force:
            # User asked to proceed despite failed sanity checks.
            common_info('proceeding with node %s removal' % node)
        else:
            return False
    ec = utils.ext_cmd(cmd)
    if ec != 0:
        common_warn('"%s" failed, rc=%d' % (cmd, ec))
        return False
    return True
def do_status(self, context):
    '''
    Quick cluster health status. Corosync status, DRBD status...
    '''
    # Report (but do not abort) when no supported stack is detected.
    stack = utils.cluster_stack()
    if not stack:
        err_buf.error(
            "No supported cluster stack found (tried heartbeat|openais|corosync)"
        )
    if utils.cluster_stack() == 'corosync':
        # Python 2 print statements — this module predates py3.
        print "Services:"
        for svc in ["corosync", "pacemaker"]:
            info = utils.service_info(svc)
            if info:
                print "%-16s %s" % (svc, info)
            else:
                print "%-16s unknown" % (svc)
        # shell=False: argv list passed directly, no shell interpolation.
        rc, outp = utils.get_stdout(['corosync-cfgtool', '-s'], shell=False)
        if rc == 0:
            print ""
            print outp
        else:
            print "Failed to get corosync status"
def admin_cli():
    """Run a CLI command posted from the admin web UI.

    Splits the POSTed ``cmd`` form field on spaces and forwards it to
    ``cli.py`` under the current interpreter; returns the captured output
    and exit status as a JSON-serialisable dict.
    """
    arguments = request.forms.get('cmd').split(' ')
    output, status = utils.get_stdout([sys.executable, './cli.py'] + arguments)
    return {"retval": status, "output": output}
def crm_mon(opts=''):
    """
    Run 'crm_mon -1'
    opts: Additional options to pass to crm_mon
    returns: rc, stdout
    """
    global _crm_mon
    if _crm_mon is None:
        # Lazily resolve the base command once; builds whose --help
        # mentions --pending also understand -j.
        if not utils.is_program("crm_mon"):
            raise IOError("crm_mon not available, check your installation")
        _, helptext = utils.get_stdout("crm_mon --help")
        _crm_mon = "crm_mon -1 -j" if "--pending" in helptext else "crm_mon -1"
    return utils.get_stdout(utils.add_sudo("%s %s" % (_crm_mon, opts)))
def crm_mon(opts=''):
    """
    Run 'crm_mon -1'
    opts: Additional options to pass to crm_mon
    returns: rc, stdout
    """
    # has_crm_mon() resolves/validates the crm_mon binary first.
    has_crm_mon()
    command = "%s -1 %s" % (crm_mon_prog, opts)
    return utils.get_stdout(utils.add_sudo(command))
def is_running(self, id):
    '''
    Is this resource running?
    '''
    # Status queries only make sense against a live CIB.
    if not is_live_cib():
        return False
    # Clones are reported under the clone id rather than the primitive's.
    test_id = self.rsc_clone(id) or id
    rc, outp = get_stdout(self.rsc_status % test_id, stderr_on=False)
    # Running iff "running" appears past position 0 and no "NOT" present.
    return "NOT" not in outp and outp.find("running") > 0
def _diff_this(pssh, local_path, nodes, this_node):
    """Page a unified diff of local_path against each node's remote copy."""
    for host, result in _diff_slurp(pssh, nodes, local_path):
        if isinstance(result, pssh.Error):
            raise ValueError("Failed on %s: %s" % (host, str(result)))
        _, _, _, remote_path = result
        _, difftext = utils.get_stdout(
            "diff -U 0 -d -b --label %s --label %s %s %s"
            % (host, this_node, remote_path, local_path))
        utils.page_string(difftext)
def _diff(pssh, local_path, nodes):
    """Page a unified diff between the first two nodes' copies of local_path."""
    by_host = _diff_slurp(pssh, nodes, local_path)
    # Fail loudly if any slurp errored, before diffing anything.
    for host, result in by_host:
        if isinstance(result, pssh.Error):
            raise ValueError("Failed on %s: %s" % (host, str(result)))
    # NOTE(review): assumes by_host has at least two entries — confirm callers.
    host_a, res_a = by_host[0]
    host_b, res_b = by_host[1]
    _, difftext = utils.get_stdout(
        "diff -U 0 -d -b --label %s --label %s %s %s"
        % (host_a, host_b, res_a[3], res_b[3]))
    utils.page_string(difftext)
def crm_mon(opts=''):
    """
    Run 'crm_mon -1'
    opts: Additional options to pass to crm_mon
    returns: rc, stdout
    """
    global _crm_mon
    if _crm_mon is None:
        prog = utils.is_program("crm_mon")
        if not prog:
            raise IOError("crm_mon not available, check your installation")
        # Builds whose --help mentions --pending also accept -j.
        _, helptext = utils.get_stdout("%s --help" % (prog))
        flags = "-1 -j" if "--pending" in helptext else "-1"
        _crm_mon = "%s %s" % (prog, flags)
    return utils.get_stdout(utils.add_sudo("%s %s" % (_crm_mon, opts)))
def get_revision_svn(repo):
    '''
    This command need svn in PATH
    '''
    # Scan `svn info` for the revision line, accepting both English
    # ("Last ...") and Spanish ("Revision del ultimo cambio") locales;
    # the revision number follows the last colon.
    for line in get_stdout("svn info %s" % repo):
        english = line.startswith('Last')
        spanish = line.startswith('Revisi') and (line.find('cambio') != -1)
        if english or spanish:
            colon = line.rindex(':')
            return int(line[colon + 2:])
    return -1
def git_log_gen(repo, number=1, extra=''):
    '''
    generator of commits
    '''
    # Yield commit hashes from `git log -<number> <extra>` run inside repo.
    with utils.working_directory(repo):
        for line in get_stdout('git log -%d %s' % (number, extra)):
            if not line.startswith('commit'):
                continue
            parts = line.split(' ')
            assert(len(parts) == 2)
            yield parts[1]
def can_use_lrmadmin():
    """Whether lrmadmin can be used for meta-data queries by this user."""
    from distutils import version
    # Since glue 1.0.10 any user may query meta-data from lrmd; older
    # releases require root or the cluster daemon user.
    minimum_glue = "1.0.10"
    rc, glue_ver = get_stdout("%s -v" % lrmadmin_prog, stderr_on=False)
    if not glue_ver:
        # lrmadmin probably not found
        return False
    new_enough = version.LooseVersion(glue_ver) >= version.LooseVersion(minimum_glue)
    privileged = userdir.getuser() in ("root", config.path.crm_daemon_user)
    return new_enough or privileged
def get_changeset_from_timestamp(repo, timestamp_searched):
    """Return the git hash whose commit date equals timestamp_searched.

    Walks history from the oldest commit to the newest; raises when no
    commit carries that exact timestamp.
    """
    with utils.working_directory(repo):
        history = list(get_stdout(r'git log --format="%H;%cd" --date=rfc'))
    for entry in reversed(history):
        chunks = entry.split(";")
        assert(len(chunks) == 2)
        commit_hash, commit_date = chunks
        when = int(time.mktime(parsedate(commit_date)))
        if when == timestamp_searched:
            return commit_hash
    raise Exception('Error in get git hash from timestamp {}'.format(timestamp_searched))
def _worddiff(self, s1, s2):
    """Return wdiff output for the two strings, or None if the temp
    files could not be created."""
    result = None
    tmp_a = utils.str2tmp(s1)
    tmp_b = utils.str2tmp(s2)
    if tmp_a and tmp_b:
        rc, result = utils.get_stdout("wdiff %s %s" % (tmp_a, tmp_b))
    # Best-effort cleanup; ignore any unlink failure.
    for tmp in (tmp_a, tmp_b):
        try:
            os.unlink(tmp)
        except:
            pass
    return result
def get_timestamp_from_changeset(repo, changeset_searched):
    '''
    Return the commit timestamp (epoch seconds) of changeset_searched.
    '''
    with utils.working_directory(repo):
        history = list(get_stdout(r'git log --format="%H;%cd" --date=rfc'))
    # Scan from the oldest commit to the newest.
    for entry in reversed(history):
        chunks = entry.split(";")
        assert(len(chunks) == 2)
        commit_hash, commit_date = chunks
        when = int(time.mktime(parsedate(commit_date)))
        if commit_hash == changeset_searched:
            return when
    raise Exception('Error in get timestamp from changeset {}'.format(changeset_searched))
def _unidiff(self, s1, s2, t1, t2):
    """Unified diff (zero context, ignoring space changes) of s1 vs s2
    under labels t1/t2; None when the temp files could not be created."""
    diff_out = None
    left = utils.str2tmp(s1)
    right = utils.str2tmp(s2)
    if left and right:
        rc, diff_out = utils.get_stdout(
            "diff -U 0 -d -b --label %s --label %s %s %s" % (t1, t2, left, right))
    # Best-effort removal of both temp files.
    for tmpfile in (left, right):
        try:
            os.unlink(tmpfile)
        except:
            pass
    return diff_out
def get_position_git_from_changeset(repo, changeset):
    """Return the 1-based position of changeset, counting from the oldest
    commit in `git log`; -1 when it is not found."""
    with working_directory(repo):
        history = list(get_stdout('git log'))
    position = 1
    for entry in reversed(history):
        if not entry.startswith('commit'):
            continue
        fields = entry.split(' ')
        if len(fields) == 2:
            if fields[1] == changeset:
                return position
            position += 1
    return -1
def get_changeset_git_from_position(repo, position=0):
    """Return the git hash at the given 1-based position counting from the
    oldest commit; raises when the position is out of range."""
    with utils.working_directory(repo):
        history = list(get_stdout('git log'))
    index = 1
    for entry in reversed(history):
        if entry.startswith('commit'):
            fields = entry.split(' ')
            assert(len(fields) == 2)
            if index == position:
                return fields[1]
            index += 1
    raise Exception('Error in get git hash from position {}'.format(position))
def can_use_lrmadmin():
    """Strict lrmadmin usability check: glue new enough, privileged user,
    binary present, lrmd running, and a probe invocation succeeding."""
    from distutils import version
    # after this glue release all users can get meta-data and
    # similar from lrmd
    minimum_glue = "1.0.10"
    rc, glue_ver = get_stdout("%s -v" % lrmadmin_prog, stderr_on=False)
    if not glue_ver:
        return False  # lrmadmin probably not found
    if version.LooseVersion(glue_ver) < version.LooseVersion(minimum_glue):
        return False
    if userdir.getuser() not in ("root", config.path.crm_daemon_user):
        return False
    if not (is_program(lrmadmin_prog) and is_process("lrmd")):
        return False
    # Final probe: listing resource classes must exit 0.
    return utils.ext_cmd(">/dev/null 2>&1 %s -C" % lrmadmin_prog) == 0
def _unidiff(self, s1, s2, t1, t2):
    """Return `diff -U 0 -d -b` output of s1 vs s2 labelled t1/t2, or
    None when temp-file creation failed."""
    output = None
    f_old = utils.str2tmp(s1)
    f_new = utils.str2tmp(s2)
    if f_old and f_new:
        rc, output = utils.get_stdout(
            "diff -U 0 -d -b --label %s --label %s %s %s"
            % (t1, t2, f_old, f_new))
    # Remove temp files; swallow any unlink failure.
    for name in (f_old, f_new):
        try:
            os.unlink(name)
        except:
            pass
    return output
def do_status(self, context):
    '''
    Quick cluster health status. Corosync status, DRBD status...
    '''
    # Report (but do not abort) when no supported stack is detected.
    stack = utils.cluster_stack()
    if not stack:
        err_buf.error("No supported cluster stack found (tried heartbeat|openais|corosync)")
    if utils.cluster_stack() == 'corosync':
        # Python 2 print statements — this module predates py3.
        print "Services:"
        for svc in ["corosync", "pacemaker"]:
            info = utils.service_info(svc)
            if info:
                print "%-16s %s" % (svc, info)
            else:
                print "%-16s unknown" % (svc)
        # shell=False: argv list passed directly, no shell interpolation.
        rc, outp = utils.get_stdout(['corosync-cfgtool', '-s'], shell=False)
        if rc == 0:
            print ""
            print outp
        else:
            print "Failed to get corosync status"
def crm_resource_support():
    """True when `crm_resource --list-standards` produces any output
    (i.e. the installed crm_resource supports listing standards)."""
    rc, out = get_stdout("crm_resource --list-standards", stderr_on=False)
    return out != ""
def run_tests(node, parameters, compiler_replace_maps, unittests):
    # Run the packaged unit tests for `node` across every platform /
    # build-mode / compiler combination: verify the generated package's
    # md5, configure+build+ctest the unittest project, and record one
    # result per "<package> - <version>" key into `unittests`.
    # Failures increment node.ret rather than raising.
    #
    # NOTE(review): this block arrived with flattened indentation and two
    # string literals wrapped mid-line; the layout (and the wrap points of
    # those strings) has been reconstructed — confirm against upstream.
    oldcwd = os.getcwd()

    # Normalise all configured directories to forward-slash form.
    artifacts_dir = parameters.rootdir
    artifacts_dir = utils.get_norm_path(artifacts_dir)
    artifacts_dir = artifacts_dir.replace('\\', '/')
    cmakelib_dir = parameters.cmakefiles
    cmakelib_dir = utils.get_norm_path(cmakelib_dir)
    cmakelib_dir = cmakelib_dir.replace('\\', '/')
    cmake3p_dir = parameters.prefix
    cmake3p_dir = utils.get_norm_path(cmake3p_dir)
    cmake3p_dir = cmake3p_dir.replace('\\', '/')
    cmake_prefix = parameters.prefix
    cmake_prefix = utils.get_norm_path(cmake_prefix)
    cmake_prefix = cmake_prefix.replace('\\', '/')
    cmake_third_party_dir = parameters.third_party_dir
    cmake_third_party_dir = utils.get_norm_path(cmake_third_party_dir)
    cmake_third_party_dir = cmake_third_party_dir.replace('\\', '/')

    package = node.get_package_name()
    package_upper = node.get_package_name_norm_upper()
    version = node.get_version()
    packing = node.is_packing()
    if not packing:
        # Nothing was packaged, so there is nothing to test.
        logging.warning("No need run_tests, because wasn't generated a package")
        return 0

    # prepare unittests
    # can be a file or content
    unittest_value = node.get_unittest()
    if unittest_value is not None:
        build_modes = node.get_build_modes()
        for plat, build_mode in product(platforms, build_modes):
            builddir = node.get_build_directory(plat, build_mode)
            path_test = os.path.join(builddir, build_unittests_foldername)
            utils.trymkdir(path_test)

            # If unittest_value names an existing file inside the build
            # dir, replace it with that file's content (note: the variable
            # is then reused as content for subsequent iterations).
            unittest_path = os.path.join(builddir, unittest_value)
            if os.path.isfile(unittest_path):
                with open(unittest_path, 'rt') as f:
                    unittest_value = f.read()

            # Materialise the test source as main.cpp in the test folder.
            with open(os.path.join(path_test, 'main.cpp'), 'wt') as f:
                f.write(unittest_value)

            if parameters.fast:
                logging.debug('skipping for because is in fast mode: "prepare"')
                break
    else:
        logging.warning('[%s] No test present.' % package)

    folder_3rdparty = parameters.third_party_dir
    output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder())

    build_modes = node.get_build_modes()
    for plat, build_mode in product(platforms, reversed(build_modes)):
        for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps):
            # verify md5sum: compare the stored .md5 against what
            # `cmake -E md5sum` reports for the generated tarball.
            workspace = node.get_workspace(plat)
            utils.trymkdir(workspace)
            with utils.working_directory(workspace):
                prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace)
                prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace)
                if os.path.exists(prefix_package) and os.path.exists(prefix_package_md5):
                    with open(prefix_package_md5, 'rt') as f:
                        md5sum = f.read().strip()
                    try:
                        logging.debug("expected md5: %s" % md5sum)
                        for line in utils.get_stdout('cmake -E md5sum %s' % prefix_package, env_modified, 'cmake'):
                            if len(line) > 0:
                                # Output format: "<md5sum> <filename>".
                                chunks = line.split(' ')
                                chunks = filter(None, chunks)  # py2: filter returns a list
                                assert(len(chunks) > 0)
                                md5sum_real = chunks[0]
                                logging.debug("real md5: %s" % md5sum_real)
                                if (md5sum != md5sum_real):
                                    logging.error('Error en generated md5sum file!!!')
                                    logging.error('Expected: %s' % md5sum)
                                    logging.error('Found: %s' % md5sum_real)
                                    # add error to node
                                    node.ret += 1
                    except utils.NotFoundProgram:
                        logging.info('can\'t verify md5 because not found cmake')
                else:
                    logging.warning('Skipping verification md5 because don\'t exists package or md5')

            logging.info('running unittests. Build mode: %s Platform: %s' % (build_mode, plat))

            # OJO con borrar cmake3p, se borra la marca
            # (beware deleting cmake3p: it removes the marker)
            # node.remove_cmake3p( cmake3p_dir )

            builddir = os.path.join(oldcwd, node.get_build_directory(plat, build_mode))
            logging.info('Using builddir %s' % builddir)
            unittest_folder = os.path.join(builddir, build_unittests_foldername)
            unittest_found = os.path.join(unittest_folder, 'main.cpp')
            unittest_found = unittest_found.replace('\\', '/')
            unittest_root = os.path.join(oldcwd, build_unittests_foldername)

            if os.path.exists(unittest_found):
                logging.info('Search cmakelib in %s' % cmakelib_dir)
                if os.path.isdir(os.path.join(cmakelib_dir)):
                    with utils.working_directory(unittest_folder):
                        generator_extra = ''
                        if generator is not None:
                            generator_extra = '-G"%s"' % generator

                        # The package under test plus all its raw depends
                        # are passed to CMake as FIND_PACKAGES.
                        find_packages = []
                        find_packages.append(package)
                        for dep in node.get_depends_raw():
                            package_name = dep.get_package_name()
                            find_packages.append(package_name)
                        find_packages_str = ';'.join(find_packages)

                        # remove CMakeCache.txt for avoid problems when
                        # change of generator
                        utils.tryremove('CMakeCache.txt')
                        utils.tryremove('cmake_install.cmake')
                        utils.tryremove('install_manifest.txt')
                        utils.tryremove_dir('CMakeFiles')

                        '''
                        TODO: refactor:
                        prefix = DEPENDS_PATH (cmake3p) (artifacts)
                        cmakefiles = CMAKI_PATH, CMAKE_MODULE_PATH (cmakelib)
                        third-party-dir = CMAKE_PREFIX_PATH (directorio artifacts/cmaki_find_package) (3rdparty)
                        rootdir = ARTIFACTS_PATH, es la base de donde esta build.py (cmaki_generator) (scripts de generacion)
                        tambien podria ser CMAKI_PWD
                        CMAKI_INSTALL: se espera tener instalado el cmaki_identifier
                        '''

                        # Configure, build+install, then run ctest in one shell command.
                        cmd = 'cmake %s %s -DARTIFACTS_PATH="%s" -DCMAKI_COMPILER="%s" -DCMAKI_PLATFORM="%s" -DCMAKE_MODULE_PATH="%s" -DPACKAGE="%s" -DPACKAGE_UPPER="%s" -DCMAKE_BUILD_TYPE="%s" -DAVOID_USE_HTTP=1 -DINSTALL_SIMPLE=1 -DCMAKE_PREFIX_PATH="%s" -DUNITTEST_PATH="%s" -DDEPENDS_PATH="%s" -DFIND_PACKAGES="%s" -DCMAKI_DEBUG=TRUE && cmake --build . --config %s --target install && ctest . -C %s --output-on-failure -VV' % (unittest_root, generator_extra, artifacts_dir, get_identifier('COMPILER'), get_identifier('ALL'), cmakelib_dir, package, package_upper, build_mode, cmake_third_party_dir, unittest_found, cmake_prefix, find_packages_str, build_mode, build_mode)
                        ret = utils.safe_system(cmd, env=env_modified)
                        node.ret += abs(ret)
                        if ret != 0:
                            unittests[ '%s - %s' % (package, version) ] = 'ERROR: Fail test'
                        else:
                            unittests[ '%s - %s' % (package, version) ] = 'OK: Pass test'
                else:
                    unittests[ '%s - %s' % (package, version) ] = 'WARN: No cmakelib available'
            else:
                unittests[ '%s - %s' % (package, version) ] = 'WARN: No unittest found'

    # successful
    return True
def quorumtool(*args):
    """Invoke corosync-quorumtool with the given arguments.

    Returns whatever utils.get_stdout yields (rc and captured output).
    """
    command = ['corosync-quorumtool'] + list(args)
    return utils.get_stdout(command, shell=False)
def update_and_restart():
    """Rewrite the Shadowsocks config file, then restart the daemon."""
    update_conf()
    restart_cmd = config.SS_EXEC + ["-d", "restart"]
    utils.get_stdout(restart_cmd)
def update_iptables():
    """Ensure the accounting chain exists with per-user port rules.

    Creates CHAIN_NAME (hooked into INPUT and OUTPUT) when missing, then
    appends a tcp --sport and --dport rule for every active user's port
    that is not already matched. Returns False when lacking privilege.
    """
    listing, exit_code = get_stdout(IPTABLES + ('-nxvL', CHAIN_NAME))
    if exit_code == 3:
        return False  # no privilege
    if exit_code == 1:
        # Chain missing: create it and jump to it from INPUT and OUTPUT.
        get_stdout(IPTABLES + ('-N', CHAIN_NAME))
        get_stdout(IPTABLES + ('-I', 'INPUT', '1', '-j', CHAIN_NAME))
        get_stdout(IPTABLES + ('-I', 'OUTPUT', '1', '-j', CHAIN_NAME))
    # Ports already covered by existing source/destination rules.
    known_sports = set(int(m) for m in re.findall(r"\bspt:(\d+)", listing, re.M))
    known_dports = set(int(m) for m in re.findall(r"\bdpt:(\d+)", listing, re.M))
    for u in user.get_all(only_active=True):
        port = u.get_port()
        if port not in known_sports:
            get_stdout(IPTABLES + ('-A', CHAIN_NAME, '-p', 'tcp', '--sport', str(port)))
        if port not in known_dports:
            get_stdout(IPTABLES + ('-A', CHAIN_NAME, '-p', 'tcp', '--dport', str(port)))
def do_diff(self, context):
    "usage: diff"
    # Page the external tool's -d (diff) output, run under sudo if needed.
    rc, diff_text = utils.get_stdout(utils.add_sudo("%s -d" % self.extcmd_stdout))
    utils.page_string(diff_text)
def admin_cli():
    """Execute a POSTed admin CLI command and return its output/exit code."""
    cmd_parts = request.forms.get('cmd').split(' ')
    stdout_text, exit_code = utils.get_stdout([sys.executable, './cli.py'] + cmd_parts)
    return {"retval": exit_code, "output": stdout_text}