def ipython_qstat(self, arg):
    """ Prints jobs of current user.

        Lists this user's job ids via ``qstat -u <user>``, then queries the
        full records with ``qstat -f`` and formats one line per job: id,
        requested cpus, state, and job name.  The job id is the first field
        so that a cancel command can parse it back out.
    """
    from subprocess import Popen, PIPE
    from IPython.utils.text import SList
    from getpass import getuser

    # Use getpass.getuser() like the squeue variants of this magic, instead
    # of spawning a `whoami` subprocess; also drops the unused itertools
    # import the original carried.
    whoami = getuser()
    jobs = Popen(['qstat', '-u', whoami],
                 stdout=PIPE).communicate()[0].split('\n')
    if len(jobs) == 1:
        return  # no jobs: qstat printed only an empty line
    # The first five lines of `qstat -u` output are headers; the final
    # element is the empty string after the trailing newline.
    ids = SList(jobs[5:-1]).fields(0)
    # now gets full info
    jobs = SList(
        Popen(['qstat', '-f'] + ids, stdout=PIPE).communicate()[0].split('\n'))
    # Attribute lines look like "    Key = value"; keep the value part.
    names = [u[u.find('=') + 1:].strip() for u in jobs.grep('Job_Name')]
    mpps = [int(u[u.find('=') + 1:]) for u in jobs.grep('Resource_List.ncpus')]
    states = [u[u.find('=') + 1:].strip() for u in jobs.grep('job_state')]
    # Record headers look like "Job Id: <id>".
    ids = [u[u.find(':') + 1:].strip() for u in jobs.grep('Job Id')]
    # The result is then synthesized, with the first field the job id, and
    # the last the job name.  This is important since it is used by cancel
    # as such.  (Loop variables renamed so the `id` builtin is not shadowed.)
    return SList([
        "{0:>10} {1:>4} {2:>3} -- {3}".format(jobid, mpp, state, name)
        for jobid, mpp, state, name in zip(ids, mpps, states, names)])
def ipython_qstat(self, arg):
    """ squeue --user=`whoami` -o "%7i %.3C %3t -- %50j" """
    from subprocess import Popen, PIPE
    from IPython.utils.text import SList
    from getpass import getuser
    # finds user name.
    whoami = getuser()
    # NOTE(review): the user argument had been scrubbed to "--user="******
    # in the original, which is not executable; reconstructed from the
    # otherwise-unused `whoami` computed above.
    squeue = Popen(["squeue", "--user=" + whoami, "-o", "\"%7i %.3C %3t %j\""],
                   stdout=PIPE)
    result = squeue.stdout.read().rstrip().split('\n')
    # Drop the header line; strip the quotes squeue echoes around each record.
    result = SList([u[1:-1] for u in result[1:]])
    # Filter on the argument, itself stripped of its surrounding characters.
    return result.grep(str(arg[1:-1]))
def ipython_qstat(self, arg):
    """ squeue --user=`whoami` -o "%7i %.3C %3t -- %50j" """
    from subprocess import Popen, PIPE
    from IPython.utils.text import SList
    from getpass import getuser
    # finds user name.
    whoami = getuser()
    # NOTE(review): the user argument had been scrubbed to "--user="******
    # in the original, which is not executable; reconstructed from the
    # otherwise-unused `whoami` computed above.
    squeue = Popen(
        ["squeue", "--user=" + whoami, "-o", "\"%7i %.3C %3t %j\""],
        stdout=PIPE)
    result = squeue.stdout.read().rstrip().split('\n')
    # Drop the header line; strip the quotes squeue echoes around each record.
    result = SList([u[1:-1] for u in result[1:]])
    # Filter on the argument, itself stripped of its surrounding characters.
    return result.grep(str(arg[1:-1]))
def ipython_qstat(self, arg):
    """ squeue --user=`whoami` -o "%7i %.3C %3t -- %50j" """
    from six import PY3
    from subprocess import Popen, PIPE
    from IPython.utils.text import SList
    from getpass import getuser
    whoami = getuser()
    # NOTE(review): the user argument had been scrubbed to "--user="******
    # in the original, which is not executable; reconstructed from the
    # otherwise-unused `whoami` computed above.
    squeue = Popen(["squeue", "--user=" + whoami, "-o", "\"%7i %.3C %3t %j\""],
                   stdout=PIPE)
    result = squeue.stdout.read().rstrip().splitlines()
    # Drop the header line and the quotes around each record.  Under
    # python 3 the pipe yields bytes, which must be decoded to text.
    if PY3:
        result = SList([u[1:-1].decode("utf-8") for u in result[1:]])
    else:
        result = SList([u[1:-1] for u in result[1:]])
    # With no argument return everything, otherwise grep for it.
    return result if str(arg) == '' else result.grep(str(arg[1:-1]))
def ipython_qstat(self, arg):
    """ Prints jobs of current user. """
    from subprocess import Popen, PIPE
    from IPython.utils.text import SList

    # Full job records, one attribute per line, for every queued job.
    raw = Popen(["qstat", "-f"], stdout=PIPE).communicate()[0]
    records = SList(raw.split("\n"))

    def value_after(line, sep):
        # Text following the first `sep` on the line, whitespace removed.
        return line[line.find(sep) + 1 :].strip()

    # Attribute lines look like "    Key = value"; record headers like
    # "Job Id: <id>".
    names = [value_after(u, "=") for u in records.grep("Job_Name")]
    mpps = [int(value_after(u, "=")) for u in records.grep("Resource_List.ncpus")]
    states = [value_after(u, "=") for u in records.grep("job_state")]
    ids = [value_after(u, ":") for u in records.grep("Job Id")]

    # One formatted line per job: id first so a cancel command can parse it.
    rows = zip(ids, mpps, states, names)
    return SList(
        ["{0:>10} {1:>4} {2:>3} -- {3}".format(*row) for row in rows]
    )
def ipython_qstat(self, arg):
    """ squeue --user=`whoami` -o "%7i %.3C %3t -- %50j" """
    from six import PY3
    from subprocess import Popen, PIPE
    from IPython.utils.text import SList
    from getpass import getuser
    whoami = getuser()
    # NOTE(review): the user argument had been scrubbed to "--user="******
    # in the original, which is not executable; reconstructed from the
    # otherwise-unused `whoami` computed above.
    squeue = Popen(
        ["squeue", "--user=" + whoami, "-o", "\"%7i %.3C %3t %j\""],
        stdout=PIPE)
    result = squeue.stdout.read().rstrip().splitlines()
    # Drop the header line and the quotes around each record.  Under
    # python 3 the pipe yields bytes, which must be decoded to text.
    if PY3:
        result = SList([u[1:-1].decode("utf-8") for u in result[1:]])
    else:
        result = SList([u[1:-1] for u in result[1:]])
    # With no argument return everything, otherwise grep for it.
    return result if str(arg) == '' else result.grep(str(arg[1:-1]))
def eval_body(body):
    """ Evaluate `body` in the IPython namespace.

        Falls back to returning the raw text as an SList when `body` is not
        a legal python expression.
    """
    shell = g_ipm.ip
    try:
        return shell.ev(body)
    except Exception:
        # Not a completely legal python expression: just hand the text
        # back as a string list.
        return SList(body.splitlines())
def store_or_execute(self, block, name):
    """ Execute a block, or store it in a variable, per the user's request. """
    cleaned = self.cleanup_input(block)
    if not name:
        # No target name: stash under the default key and run the block.
        self.shell.user_ns['pasted_block'] = cleaned
        self.shell.run_cell(cleaned)
        return
    # Storing it for further editing: keep it as a string list.
    self.shell.user_ns[name] = SList(cleaned.splitlines())
    print("Block assigned to '%s'" % name)
def ipython_qstat(self, arg):
    """ Prints jobs of current user. """
    from subprocess import Popen, PIPE
    from IPython.utils.text import SList
    # Full records for all queued jobs, one attribute per line.
    jobs = SList(Popen(['qstat', '-f'], stdout=PIPE).communicate()[0].split('\n'))
    # Attribute lines look like "    Key = value"; keep the value part.
    names = [u[u.find('=')+1:].lstrip().rstrip() for u in jobs.grep('Job_Name')]
    mpps = [int(u[u.find('=')+1:]) for u in jobs.grep('Resource_List.ncpus')]
    states = [u[u.find('=')+1:].lstrip().rstrip() for u in jobs.grep('job_state')]
    # Record headers look like "Job Id: <id>".
    ids = [u[u.find(':')+1:].lstrip().rstrip() for u in jobs.grep('Job Id')]
    # One formatted line per job: id, cpus, state, name.  The id is the
    # first field so a cancel command can parse it back out.
    return SList([
        "{0:>10} {1:>4} {2:>3} -- {3}".format(id, mpp, state, name) \
        for id, mpp, state, name in zip(ids, mpps, states, names)])
def store_or_execute(shell, block, name):
    """ Execute a block, or store it in a variable, per the user's request. """
    # Dedent and prefilter so what we store matches what is executed by
    # run_cell.
    b = shell.prefilter(textwrap.dedent(block))
    if name:
        # If storing it for further editing, run the prefilter on it
        shell.user_ns[name] = SList(b.splitlines())
        # print() call instead of a python-2-only print statement, for
        # consistency with the other store_or_execute variants in this file.
        print("Block assigned to '%s'" % name)
    else:
        shell.user_ns['pasted_block'] = b
        shell.run_cell(b)
def store_or_execute(self, block, name):
    """ Execute a block, or store it in a variable, per the user's request. """
    shell = self.shell
    if name:
        # Storing for further editing: keep the raw block as a string list.
        shell.user_ns[name] = SList(block.splitlines())
        print("Block assigned to '%s'" % name)
        return
    cleaned = self.preclean_input(block)
    shell.user_ns['pasted_block'] = cleaned
    # Flag that a paste magic is driving run_cell; the finally clause
    # guarantees the flag is cleared even if execution raises.
    shell.using_paste_magics = True
    try:
        shell.run_cell(cleaned)
    finally:
        shell.using_paste_magics = False
def findfiles(patterns=(), path=None):
    '''Return filenames that match all specified patterns.

    patterns -- a list of string patterns that must all match in the
        the returned files.  Can be also a single string with patterns
        separated by whitespace characters.
    path     -- optional list of directories to be searched instead of the
        current directory.  Can be also a string which is taken as a single
        directory path.

    Pattern syntax and examples:

    ^start  -- match "start" only at the beginning of the string
    end$    -- match "end" only at the end of string
    <7>     -- match number 7 preceded by any number of leading zeros
    <1-34>  -- match an integer range.  The matched number may have
               one or more leading zeros
    <7->    -- match an integer greater or equal 7 allowing leading zeros
    <->     -- match any integer

    Return a list of matching filenames.
    '''
    import os.path
    from py15sacla.multipattern import MultiPattern
    from IPython.utils.text import SList
    # `basestring` only exists on python 2; fall back to `str` so a single
    # directory path is still accepted on python 3 (the rest of this file
    # supports both interpreters).
    try:
        string_types = basestring
    except NameError:
        string_types = str
    if isinstance(path, string_types):
        path = [path]
    mp = MultiPattern(patterns)
    allpaths = ['.'] if path is None else path
    rv = SList()
    for d in unique_everseen(allpaths):
        if not os.path.isdir(d):
            continue
        dirfiles = os.listdir(d)
        dirfiles.sort(key=sortKeyNumericString)
        # filter matching names first, this does not need any disk access
        files = filter(mp.match, dirfiles)
        files = [os.path.normpath(os.path.join(d, f)) for f in files]
        # filter out any directories
        files = filter(os.path.isfile, files)
        rv += files
    return rv
def export(self, event):
    """ Tars files from a calculation.

        Collects the input/output files known to the ``collect`` object in
        the user namespace and either returns the list of files (``--list``)
        or writes them to a tar archive, compressed according to the file
        extension.
    """
    import six
    import argparse
    import tarfile
    from os import getcwd
    from os.path import exists, isfile, extsep, relpath, dirname, join
    from glob import iglob
    from ..misc import RelativePath
    from .. import interactive
    from . import get_shell
    shell = get_shell(self)

    parser = argparse.ArgumentParser(
        prog='%export',
        description='Exports input/output files from current job-folder. '
                    'Depending on the extension of FILE, this will create '
                    'a simple tar file, or a compressed tar file. Using the '
                    'option --list, one can also obtain a list of all files '
                    'which would go in the tar archive. '
                    'Finally, this function only requires the "collect" '
                    'exists in the usernamespace. It may have been declared '
                    'from loading a job-folder using "explore", or directly '
                    'with "collect = vasp.MassExtract()".')
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('--list', action="store_true", dest="aslist",
                       help='Do not tar, return a list of all the files.')
    group.add_argument(
        'filename', metavar='FILE', type=str, default='export.tar.gz',
        nargs='?',
        help='Path to the tarfile. Suffixes ".gz" and ".tgz" indicate '
             'gzip compression, whereas ".bz" and ".bz2" indicate bzip '
             'compression. Otherwise, no compression is used.')
    parser.add_argument('--input', action="store_true", dest="input",
                        help='Include input (INCAR/crystal.d12) files.')
    parser.add_argument('--dos', action="store_true", dest="dos",
                        help='Include Density of States (DOSCAR) files.')
    parser.add_argument('--structure', action="store_true", dest="structure",
                        help='Include structure input (POSCAR) files.')
    parser.add_argument('--charge', action="store_true", dest="charge",
                        help='Include charge (CHGCAR) files.')
    parser.add_argument('--contcar', action="store_true", dest="contcar",
                        help='Include CONTCAR files.')
    parser.add_argument(
        '--wavefunctions', action="store_true", dest="wavefunctions",
        help='Include wavefunctions (WAVECAR/crystal.98) files.')
    parser.add_argument('--procar', action="store_true", dest="procar",
                        help='Include PROCAR files.')
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument('--down', action="store_true", dest="down",
                       help='Tar from one directory down.')
    group.add_argument('--from', type=str, dest="dir", default=None,
                       help='Root directory from which to give filenames. '
                            'Defaults to current working directory.')
    group.add_argument(
        '--with', type=str, dest="others", nargs='*',
        help='Adds pattern or filename to files to export.'
             'Any file in any visited directory matching the given pattern '
             'will be added to the archive. This options can be given more than '
             'once if different file patterns are required.')

    try:
        args = parser.parse_args(event.split())
    except SystemExit:
        # argparse exits on error or --help; swallow it so the magic does
        # not terminate the interactive session.
        return None

    collect = shell.user_ns.get('collect', None)
    rootpath = getattr(collect, 'rootpath', None)
    if collect is None:
        print("Could not find 'collect' object in user namespace.")
        print("Please load a job-dictionary.")
        return

    # Everything but the tar filename is forwarded to collect.files.
    kwargs = args.__dict__.copy()
    kwargs.pop('filename', None)
    if rootpath is None:
        if hasattr(shell.user_ns.get('collect', None), 'rootpath'):
            rootpath = shell.user_ns.get('collect').rootpath
    directory = getcwd() if rootpath is None else dirname(rootpath)
    if args.down:
        directory = join(directory, '..')
    elif args.dir is not None:
        directory = RelativePath(args.dir).path

    # set of directories visited.
    directories = set()
    # set of files to tar
    allfiles = set()
    for file in collect.files(**kwargs):
        allfiles.add(file)
        directories.add(dirname(file))
    # adds files from "with" argument.
    if hasattr(args.others, "__iter__"):
        for pattern in args.others:
            for dir in directories:
                for sfile in iglob(join(dir, pattern)):
                    if exists(sfile) and isfile(sfile):
                        allfiles.add(sfile)
    # adds current job folder.
    if interactive.jobfolder_path is not None:
        if isfile(interactive.jobfolder_path):
            allfiles.add(interactive.jobfolder_path)

    # now tar or list files.
    if args.aslist:
        from IPython.utils.text import SList
        directory = getcwd()
        return SList([relpath(file, directory) for file in allfiles])
    else:
        # get filename of tarfile.
        args.filename = relpath(RelativePath(args.filename).path, getcwd())
        if exists(args.filename):
            if not isfile(args.filename):
                print("{0} exists but is not a file. Aborting.".format(
                    args.filename))
                return
            a = ''
            while a not in ['n', 'y']:
                # NOTE(review): the original called six.raw_input, which six
                # does not define; six.moves.input is the portable spelling.
                a = six.moves.input(
                    "File {0} already exists.\nOverwrite? [y/n] ".format(
                        args.filename))
            if a == 'n':
                print("Aborted.")
                return
        # figure out the type of the tarfile.
        if args.filename.find(extsep) == -1:
            endname = ''
        else:
            endname = args.filename[-args.filename[::-1].find(extsep) - 1:][1:]
        if endname in ['gz', 'tgz']:
            tarme = tarfile.open(args.filename, 'w:gz')
        elif endname in ['bz', 'bz2']:
            tarme = tarfile.open(args.filename, 'w:bz2')
        else:
            tarme = tarfile.open(args.filename, 'w')
        for file in allfiles:
            tarme.add(file, arcname=relpath(file, directory))
        tarme.close()
        print("Saved archive to {0}.".format(args.filename))
def mglob_f(self, arg):
    """ Expand a glob pattern into an SList of matches.

        When no pattern is given, prints usage help instead and returns
        None.
    """
    from IPython.utils.text import SList
    if arg.strip():
        return SList(expand(arg))
    # print() calls instead of python-2-only print statements, consistent
    # with the rest of this file (identical output on python 2).
    print("Please specify pattern!")
    print(globsyntax)
class LeoNode(UserDictMixin, object):
    #### (object,UserDict.DictMixin):
    """ Node in Leo outline

    Most important attributes (getters/setters available:
        .v     - evaluate node, can also be aligned
        .b, .h - body string, headline string
        .l     - value as string list

    Also supports iteration, setitem / getitem (indexing):
        wb.foo['key'] = 12
        assert wb.foo['key'].v == 12

    Note the asymmetry on setitem and getitem!  Also other
    dict methods are available.

        .ipush() - run push-to-ipython

    Minibuffer command access (tab completion works):

        mb save-to-file
    """
    #@+others
    #@+node:ekr.20120401063816.10190: *3* __init__ (LeoNode)
    def __init__(self, p):
        # Wrap a Leo position; keep a private copy so later tree edits do
        # not move this node out from under us.
        self.c = p.v.context  # New in Leo 4.10.1.
        self.p = p.copy()
    #@+node:ekr.20120401063816.10191: *3* __str__
    def __str__(self):
        return "<LeoNode %s>" % str(self.p.h)

    __repr__ = __str__
    #@+node:ekr.20120401063816.10192: *3* __get_h and _set_h
    def __get_h(self):
        return self.p.headString()

    def __set_h(self, val):
        # Record the last edited node and redraw the outline.
        c = self.c
        c.setHeadString(self.p, val)
        LeoNode.last_edited = self
        c.redraw()

    h = property(__get_h, __set_h, doc="Node headline string")
    #@+node:ekr.20120401063816.10193: *3* _get_b and __set_b
    def __get_b(self):
        return self.p.bodyString()

    def __set_b(self, val):
        # Record the last edited node and redraw the outline.
        c = self.c
        c.setBodyString(self.p, val)
        LeoNode.last_edited = self
        c.redraw()

    b = property(__get_b, __set_b, doc="Nody body string")
    #@+node:ekr.20120401063816.10194: *3* __set_val
    def __set_val(self, val):
        self.b = format_for_leo(val)

    v = property(
        # pylint: disable=W0108
        # W0108:LeoNode.<lambda>: Lambda may not be necessary
        lambda self: eval_node(self),
        __set_val,
        doc="Node evaluated value")
    #@+node:ekr.20120401063816.10195: *3* __set_l
    def __set_l(self, val):
        self.b = '\n'.join(val)

    l = property(
        lambda self: SList(self.b.splitlines()),
        __set_l,
        doc="Node value as string list")
    #@+node:ekr.20120401063816.10196: *3* __iter__
    def __iter__(self):
        """ Iterate through nodes direct children """
        return (LeoNode(p) for p in self.p.children_iter())
    #@+node:ekr.20120401063816.10197: *3* __children
    def __children(self):
        # Map child headlines to child nodes.  Headlines of the form
        # "@k name" are keyed by "name"; headlines that are valid python
        # attribute names are deliberately excluded (see __setitem__ for
        # the rationale).
        d = {}
        for child in self:
            head = child.h
            tup = head.split(None, 1)
            if len(tup) > 1 and tup[0] == '@k':
                d[tup[1]] = child
                continue
            if not valid_attribute(head):
                d[head] = child
                continue
        return d
    #@+node:ekr.20120401063816.10198: *3* keys
    def keys(self):
        d = self.__children()
        return sorted(list(d.keys()))
    #@+node:ekr.20120401063816.10199: *3* __getitem__
    def __getitem__(self, key):
        """ wb.foo['Some stuff'] returns a child node with headline
        'Some stuff'.

        If key is a valid python name (e.g. 'foo'), look for headline
        '@k foo' as well.
        """
        key = str(key)
        d = self.__children()
        return d[key]
    #@+node:ekr.20120401063816.10200: *3* __setitem__
    def __setitem__(self, key, val):
        """ You can do wb.foo['My Stuff'] = 12 to create children.

        Create 'My Stuff' as a child of foo (if it does not exist), and
        do .v = 12 assignment.

        Exception: wb.foo['bar'] = 12 will create a child with headline
        '@k bar', because bar is a valid python name and we don't want
        to crowd the WorkBook namespace with (possibly numerous) entries.
        """
        c = self.c
        key = str(key)
        d = self.__children()
        if key in d:
            # Child already exists: just assign its value.
            d[key].v = val
            return
        if not valid_attribute(key):
            head = key
        else:
            head = '@k ' + key
        p = c.createLastChildNode(self.p, head, '')
        LeoNode(p).v = val
    #@+node:ekr.20120401063816.10201: *3* __delitem__
    def __delitem__(self, key):
        """ Remove child

        Allows stuff like wb.foo.clear() to remove all children
        """
        c = self.c
        self[key].p.doDelete()
        c.redraw()
    #@+node:ekr.20120401063816.10202: *3* ipush (LeoNode)
    def ipush(self):
        """ Does push-to-ipython on the node """
        # push_from_leo(self)
        CommandChainDispatcher(self)
    #@+node:ekr.20120401063816.10203: *3* go
    def go(self):
        """ Set node as current node (to quickly see it in Outline) """
        #c.setCurrentPosition(self.p) # argh, there should be another way
        #c.redraw()
        #s = self.p.bodyString()
        #c.setBodyString(self.p,s)
        c = self.c
        c.selectPosition(self.p)
    #@+node:ekr.20120401063816.10204: *3* append
    def append(self):
        """ Add new node as the last child, return the new node """
        p = self.p.insertAsLastChild()
        return LeoNode(p)
    #@+node:ekr.20120401063816.10205: *3* script
    def script(self):
        """ Method to get the 'tangled' contents of the node

        (parse @others, section references etc.)
        """
        c = self.c
        return g.getScript(c, self.p, useSelectedText=False, useSentinels=False)
    #@+node:ekr.20120401063816.10206: *3* __get_uA
    def __get_uA(self):
        # Create the uA if necessary.
        p = self.p
        if not hasattr(p.v, 'unknownAttributes'):
            p.v.unknownAttributes = {}
        d = p.v.unknownAttributes.setdefault('ipython', {})
        return d
    #@-others

    uA = property(__get_uA, doc="Access persistent unknownAttributes of node")