def notify(title, message):
    """Display a desktop notification through the libnotify command."""
    command = [libnotify_cmd] + list(libnotify_defaults) + [title, message]
    process.execute(command)
def pip_install_packages(packages_list, warn_only=False):
    """ Install the given packages by spawning :program:`pip install`.

    :param packages_list: a list of strings, each a PIP package name. See
        the `upgrades` modules (notably `daemon`) for real-life examples.
    :param warn_only: see :func:`install_packages`.

    .. versionadded:: 1.3
        Before the WMI2, the package management foundations didn't exist.
    """
    thread_name = stylize(ST_NAME, current_thread().name)
    pretty_names = u', '.join(stylize(ST_NAME, pkg) for pkg in packages_list)

    logging.notice(_(u'{0}: Installing needed packages {1} from source before '
                     u'continuing. Please wait…').format(thread_name, pretty_names))

    out, err = process.execute(['pip', 'install'] + packages_list)

    if not err:
        logging.notice(_(u'{0}: Successfully installed package(s) {1} via {2}.').format(
            thread_name, pretty_names, stylize(ST_NAME, 'PIP')))
    else:
        logging.warn_or_raise(_(u'An error occured while installing package(s) '
                                u'{0}! PIP install Log follows:').format(pretty_names)
                              + u'\n' + err, warn_only=warn_only)
def __executeSVN(self, command, arg="", split=False):
    """Run an svnlook subcommand against this repository/transaction.

    Returns the raw output, or — when *split* is true — a list of its
    stripped, non-empty lines.
    """
    # NOTE(review): the command line is built by plain interpolation;
    # assumes repos/arg contain no shell-sensitive characters — confirm.
    full_command = "svnlook --%s %s %s %s %s" % (
        self.type, self.txn, command, self.repos, arg)
    result = process.execute(full_command)
    if not split:
        return result
    return [line.strip() for line in result.split("\n") if line.strip()]
def backtrace_to_html(bt_filename, output):
    """Render *bt_filename* to HTML via core_proc.py, writing to *output*.

    On a non-zero exit code the filter's stderr is appended to the output
    file so the failure is visible in the artifact itself.
    """
    filter_script = runtime.source_path("devtools/coredump_filter/core_proc.py")
    with open(output, "w") as out_file:
        res = process.execute(
            [runtime.python_path(), filter_script, bt_filename],
            check_exit_code=False, check_sanitizer=False, stdout=out_file)
    if res.exit_code != 0:
        with open(output, "a") as out_file:
            out_file.write("\n")
            out_file.write(res.std_err)
def apt_do_upgrade():
    """Run « unattended-upgrades » and return its (stdout, stderr) pair.

    Returns ``(None, None)`` when the command fails. No event/notification
    is sent from here — that is up to the [much higher] calling function.
    """
    try:
        # NOTE: we do not send an event / notification here. It's up
        # to the [much higher] calling function to do it.
        return process.execute(['unattended-upgrades'])
    except Exception:
        # BUGFIX: was a bare ``except:``, which would also swallow
        # SystemExit/KeyboardInterrupt; only log genuine runtime errors.
        logging.exception(_(u'Error while running « unattended-upgrades »!'))
        return None, None
def countFilesByExtension():
    """Return [extension, count] pairs for git-tracked files, most frequent first.

    Files with no dot in their name are grouped under "NO EXT".
    """
    tracked = process.execute("git ls-files").split("\n")[0:-1]
    counts = defaultdict(int)
    for name in tracked:
        extension = name.split(".")[-1] if "." in name else "NO EXT"
        counts[extension] += 1
    pairs = [list(item) for item in counts.items()]
    return sorted(pairs, key=lambda pair: pair[1], reverse=True)
def countMergesByAuthor():
    """Return [author, email, merge-count] triples from ``git shortlog --merges``."""
    shortlog = process.execute("git shortlog -s -e -n --merges")
    pattern = re.compile(r"\s*(\d+)\t(.+) <(.+)>")
    results = []
    for line in shortlog.split("\n"):
        found = pattern.match(line)
        if not found:
            continue
        count, name, address = found.groups()
        results.append([name.title(), address, int(count)])
    return results
def execute_benchmark(path, budget=None, threads=None):
    """ Run a benchmark binary and return its parsed JSON output.

    :param path: path to benchmark binary
    :param budget: time budget, sec (passed as ``-b`` when given)
    :param threads: number of threads to run benchmark (passed as ``-t``)
    :return: map of benchmark values
    """
    command = [runtime.binary_path(path), "-f", "json"]
    if budget is not None:
        command.extend(["-b", str(budget)])
    if threads is not None:
        command.extend(["-t", str(threads)])
    result = process.execute(command)
    return json.loads(result.std_out)
def compile(self, source):
    '''Execute the OWL compiler in a separate process.

    Args:
        source: The path to the source file to be compiled.

    Returns:
        A 2-tuple containing two strings. The first is the path to the
        compiled executable and the second is a log of standard error output.

    Raises:
        CompileError: If compilation failed.
    '''
    # NOTE(review): the compiler entry point is a hard-coded absolute path on
    # one developer's machine — this will not work elsewhere; confirm intent.
    code, out, err = process.execute(sys.executable,
        r'C:\Users\rjs\Documents\dev\PycharmProjects\owl_basic\compiler\main.py',
        source)
    # Python 2 print statements: debugging output left in place.
    print code
    print out
    print err
    # NOTE(review): the docstring promises a 2-tuple and the real executable
    # path, but a 3-tuple with a placeholder name is returned — looks
    # unfinished; `code` is also unused.
    return "unknown.exe", out, err
def _read_clipboard(self):
    """Return the clipboard contents (first element of ``pbpaste`` output)."""
    output = process.execute('pbpaste')
    return output[0]
def _write_clipboard(self, value):
    """Place *value* onto the clipboard by piping it to ``pbcopy``."""
    process.execute('pbcopy', process_input=value)
# split each string into metric name + value self.metrics = {} for v in rv: nameval = v.split('=') if len(nameval) == 2: try: if nameval[0] in ['rootdelay', 'rootdisp']: # convert from milliseconds to seconds self.metrics[nameval[0]] = round( float(nameval[1]) / 1000.0, 9) elif nameval[0] in _aliases: # convert from milliseconds to seconds, alias self.metrics[_aliases[nameval[0]]] = round( float(nameval[1]) / 1000.0, 9) else: self.metrics[nameval[0]] = float(nameval[1]) except ValueError: # ignore non-numeric values pass def getmetrics(self): return self.metrics if __name__ == "__main__": import pprint import process nv = NTPVars(process.execute('vars')) v = nv.getmetrics() pprint.pprint(v)
# |---- [Final_files] --> summary result files (Tweets & Users) # if __name__ == '__main__': # Pre processing # Will read all json files from input directory and will produce new files # after data cleaning. # Will also produce a Bulkfile containing all data. pre.execute() # Processing # Will read bulk file and calculate the number of tweets and users # found in each day. # The result is two csv files. Tweets and Users csv pro.execute() # Final merge of Tweets and Users Datasets # todo: Implement a for loop to cover all tweets and users csv files rootdir = './Final-files' file = 'processed-tweets.csv' filepath = rootdir + os.sep + file dfTweets = pd.read_csv(filepath, names=['Date', 'Hour', 'Tweets']) rootdir = './Final-files' file = 'processed-users.csv' filepath = rootdir + os.sep + file dfUsers = pd.read_csv(filepath, names=['Date', 'Hour', 'Users']) df = pd.merge(dfTweets, dfUsers, left_on=['Date', 'Hour'],
def install_packages(packages_for_distros, warn_only=False):
    """ Installs a package, the standard way (= recommended by the underlying
    distro). Will use `apt-get` (or `python-apt`) on debian and derivatives,
    `emerge` on gentoo, etc.

    :param packages_for_distros: a `dict`, whose *keys* are constants from
        `licorn.foundations.constants.distros`, and *values* are lists of
        strings representing packages names. Packages names can be different
        on the various distros, they just have to be valid. For real-life
        examples, see in the `upgrades` modules (notably daemon).

        .. note:: the special *key/value* pair ``distros.UNKNOWN`` is used
            for user messages only. Put the « human-readable » form of the
            package name here.

    :param warn_only: a boolean indicating that any error/exception should be
        considered harmless and should not halt the current operations. When
        installing a bunch of packages, this could eventually be useful.
        Defaults to ``False`` (=raise exceptions and stop).

    .. note:: all implementations are not yet done. As of 20120227,
        Only Debian/Ubuntu works via `apt-get`.

    .. warning:: Currently, this function does the job **automatically**, eg.
        it won't ask for any confirmation before trying to install the asked
        packages.

    .. versionadded:: 1.3
    """
    # this is not cool to do this here. We have a circular loop.
    from licorn.core import LMC

    if LMC.configuration.distro not in packages_for_distros:
        logging.warn_or_raise(_(u'Your distro is not yet supported to automatically install '
            u'package {0}, skipping it.').format(
                stylize(ST_NAME, packages_for_distros[distros.UNKNOWN])),
            warn_only=warn_only)

    if LMC.configuration.distro in (distros.LICORN, distros.UBUNTU, distros.DEBIAN):
        try:
            packages = packages_for_distros[LMC.configuration.distro]
        except KeyError:
            # fall back to debian package name as last resort. This should still
            # work in the vast majority of cases.
            packages = packages_for_distros[distros.DEBIAN]

        tname = stylize(ST_NAME, current_thread().name)
        pnames = u', '.join(stylize(ST_NAME, x) for x in packages)

        # TODO: re-implement this with internal `python-apt` instead of forking
        # a non-interactive :program:`apt-get` process.
        os.environ['DEBIAN_FRONTEND'] = 'noninteractive'

        logging.notice(_(u'{0}: Installing needed packages {1} before continuing. Please wait…').format(tname, pnames))

        out, err = process.execute(['apt-get', 'install', '--quiet',
                    '--yes', '--force-yes', '--verbose-versions'] + packages)

        if err:
            logging.warn_or_raise(_(u'{0}: An error occured while installing package(s) '
                u'{1}! Apt-Get Log follows:').format(tname, pnames)
                + u'\n' + err, warn_only=warn_only)
        else:
            logging.notice(_(u'{0}: Successfully installed package(s) {1} via {2}.').format(
                tname, pnames, stylize(ST_NAME, 'apt-get')))
    else:
        # BUGFIX: `pnames` was previously undefined on this code path (it was
        # only assigned inside the Debian branch), raising a NameError instead
        # of the intended warning. Use the human-readable UNKNOWN entry.
        pnames = stylize(ST_NAME, packages_for_distros[distros.UNKNOWN])
        # TODO: implement emerge/RPM/YUM whatever here.
        logging.warn_or_raise(_(u'Installing packages on your distro is not yet '
            u'supported, sorry. You should install {0} yourself before '
            u'continuing.').format(pnames), warn_only=warn_only)
rv = [v.strip() for v in lines.split(',')] # split each string into metric name + value self.metrics = {} for v in rv: nameval = v.split('=') if len(nameval) == 2: try: if nameval[0] in ['rootdelay', 'rootdisp']: # convert from milliseconds to seconds self.metrics[nameval[0]] = round(float(nameval[1]) / 1000.0, 9) elif nameval[0] in _aliases: # convert from milliseconds to seconds, alias self.metrics[_aliases[nameval[0]]] = round(float(nameval[1]) / 1000.0, 9) else: self.metrics[nameval[0]] = float(nameval[1]) except ValueError: # ignore non-numeric values pass def getmetrics(self): return self.metrics if __name__ == "__main__": import pprint import process nv = NTPVars(process.execute('vars')) v = nv.getmetrics() pprint.pprint(v)
class NTPVars(object):
    """Container for daemon variables parsed from ntpd/chronyd 'vars' output."""

    def __init__(self, lines=None, elapsed=0):
        """Parse raw 'vars' output (a string, or an iterable of lines).

        *elapsed* is recorded under the 'varstime' key alongside the
        parsed metrics.
        """
        if not isinstance(lines, str):
            # multiple lines - join them into one string
            lines = " ".join(lines)
        # single string - split it by commas, remove whitespace
        fields = [field.strip() for field in lines.split(',')]
        # chronyd output has 12 fields and no equals signs, while ntpd
        # output is dominated by name=value pairs.
        looks_like_chronyd = lines.count('=') < 5 and len(fields) >= 12
        if looks_like_chronyd:
            self.metrics = parse_chronyd_vars(fields)
        else:
            # otherwise assume ntpd
            self.metrics = parse_ntpd_vars(fields)
        self.metrics['varstime'] = elapsed

    def getmetrics(self):
        """Return the metric-name → value dict."""
        return self.metrics


if __name__ == "__main__":
    import pprint
    import process

    raw, took = process.execute('vars', debug=False)
    metrics = NTPVars(raw, took).getmetrics()
    pprint.pprint(metrics)
def git_log():
    """Return the git log split into per-commit chunks on the custom delimiter."""
    raw = process.execute(GIT_LOG_COMMAND)
    return raw.split(".:*-*:.\n")[1:]
def compile(filename, options):
    '''Drive the full compilation pipeline for one source file.

    The source is detokenized, parsed, transformed through a strictly
    ordered series of AST/CFG passes, dumped to diagnostic XML files and —
    when ``options.use_clr`` is set — emitted as a .NET assembly, which may
    optionally be verified with PEVerify and round-tripped through IL for
    debuggable CIL output.

    Args:
        filename: Path of the source file to compile.
        options: Parsed command-line options controlling each stage.
    '''
    if options.use_clr:
        # .NET Framework
        import clr
        clr.AddReference('System.Xml')
        from System.Xml import XmlTextWriter, Formatting
    if not options.use_clr:
        # TODO: Use non-recursive code for the flowgraph
        sys.setrecursionlimit(2000)
    data = readFile(filename)
    detokenHandle = detokenize(data, options)
    data, physical_to_logical_map, line_offsets, line_number_prefixes = indexLineNumbers(detokenHandle, options)
    data = warnOnMissingNewline(data)
    parse_tree = syntax.parser.parse(data, options)
    setSourceDebugging(data, line_offsets, line_number_prefixes, parse_tree)
    # AST transformation passes; order matters — each pass feeds the next.
    setParents(parse_tree, options)
    splitComplexNodes(parse_tree, options)
    simplifyAst(parse_tree, options)
    line_mapper = createLineMapper(parse_tree, physical_to_logical_map)
    dv = extractData(parse_tree, options)
    createForwardControlFlowGraph(parse_tree, line_mapper, options)
    entry_points = locateEntryPoints(parse_tree, line_mapper, options)
    convertLongjumpsToExceptions(parse_tree, line_mapper, options)
    convertSubroutinesToProcedures(parse_tree, entry_points, line_mapper, options)
    correlateLoops(entry_points, options)
    basic_blocks = identifyBasicBlocks(entry_points, options)
    ordered_basic_blocks = orderBasicBlocks(basic_blocks, options)
    typecheck(parse_tree, entry_points, options)
    # Array visitor - rewrite array expressions
    stv = buildSymbolTables(entry_points, options)
    # Diagnostic dumps written alongside the source file.
    dumpXmlAst(parse_tree, filename + "_ast.xml", options)
    dumpXmlCfg(parse_tree, filename + "_cfg.graphml", options)
    dumpXmlBlocks(basic_blocks, filename + "_blocks.graphml", options)
    output_name = os.path.splitext(os.path.basename(filename))[0]
    source_path = os.path.abspath(filename)
    if options.use_clr:
        from codegen.clr.generate import AssemblyGenerator
        ag = AssemblyGenerator(line_mapper)
        exe_filename = ag.generateAssembly(source_path, output_name, stv.globalSymbols, dv, ordered_basic_blocks)
        if options.peverify:
            # Run PEVerify on the resulting executable
            logging.debug("Verifying")
            # NOTE(review): hard-coded Windows SDK path — confirm it matches
            # the SDK version installed on the build machine.
            peverify_exe = r'C:\Program Files\Microsoft SDKs\Windows\v6.0A\Bin\PEVerify.exe'
            process.execute(peverify_exe, exe_filename)
        if options.create_il:
            # Create debuggable CIL files by disassebling and reassembling the
            # executable
            # Run ILDASM on the produced file
            logging.debug("Disassembling to CIL")
            ildasm_exe = r"C:\Program Files\Microsoft SDKs\Windows\v6.0A\Bin\x64\ildasm.exe"
            logging.debug("ILDAsm.exe from %s", ildasm_exe)
            # Swap the 'exe' suffix for 'il' to name the disassembly output.
            il_filename = exe_filename[:-3] + 'il'
            process.execute(ildasm_exe, '/OUT=%s' % il_filename, exe_filename)
            logging.debug("Reassembling with CIL debug info")
            clr.AddReference("Microsoft.Build.Utilities")
            from Microsoft.Build.Utilities import ToolLocationHelper, TargetDotNetFrameworkVersion
            ilasm_exe = ToolLocationHelper.GetPathToDotNetFrameworkFile("ILAsm.exe", TargetDotNetFrameworkVersion.VersionLatest)
            logging.debug("ILAsm.exe from %s", ilasm_exe)
            process.execute(ilasm_exe, '/EXE', '/DEBUG', il_filename)
def git_ls_files():
    """List git-tracked file names, dropping the trailing empty entry."""
    listing = process.execute(GIT_LS_FILES_COMMAND)
    return listing.split("\n")[0:-1]
def is_desktop_active():
    """Probe DESKTOP_CMD and report desktop activity.

    NOTE(review): returns True when the command output contains "false" —
    presumably the command reports a screensaver/lock flag that is inverted
    here; confirm against DESKTOP_CMD's definition.
    """
    output = execute(DESKTOP_CMD)
    return "false" in output
def git_blame(file):
    """Return the parsed blame output for *file*.

    Logs which file failed before re-raising, so batch callers can see
    the culprit.
    """
    try:
        return split_blame(process.execute(GIT_BLAME_COMMAND % file))
    except Exception:
        # BUGFIX: was a bare ``except:`` (also trapping KeyboardInterrupt /
        # SystemExit) with a Python-2-only print statement; narrowed and made
        # 2/3-compatible while keeping the log-then-reraise behaviour.
        print("Error getting blame of %s" % file)
        raise
def countCommits():
    """Total number of non-merge commits reachable from HEAD."""
    output = process.execute("git rev-list --no-merges --count HEAD")
    return int(output)