def magic_sym(self, arg):
    """ Create Sympy variables easily. """
    try:
        import sympy
    except ImportError:
        raise UsageError("could not import sympy.")
    args = parse_argstring(magic_sym, arg)
    factory = sympy.Symbol
    kwds = {}
    if args.kind == 'integer':
        kwds = dict(integer=True)
    elif args.kind == 'real':
        kwds = dict(real=True)
    elif args.kind == 'complex':
        kwds = dict(complex=True)
    elif args.kind == 'function':
        factory = sympy.Function
    if not args.quiet:
        if args.kind is not None:
            print('Adding %s variables:' % args.kind)
        else:
            print('Adding variables:')
    for name in args.names:
        name = name.encode('ascii')
        var = factory(name, **kwds)
        get_shell(self).user_ns[name] = var
        if not args.quiet:
            print(' %s' % name)
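# A hedged usage sketch for the magic above (assuming it is registered as
# %sym and that its argument spec defines --kind, --quiet, and positional
# names, as the attribute accesses above suggest):
#
#   In [1]: %sym --kind integer n m
#   Adding integer variables:
#    n
#    m
#   In [2]: n + m   # n and m are now sympy symbols created with integer=True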
def arcomm(self, line, cell=None, local_ns={}):
    args = magic_arguments.parse_argstring(self.arcomm, line)
    commands = []
    responses = []
    if args.command:
        commands = [args.command]
    elif cell:
        commands = cell.splitlines()
    for endpoint in args.endpoints:
        if endpoint in self._connections:
            conn = self._connections[endpoint]
        else:
            conn = arcomm.connect(endpoint, askpass=args.askpass)
            self._connections[conn.hostname] = conn
        if commands:
            response = conn.send(commands, encoding=args.encoding)
            print(response)
            sys.stdout.flush()
            responses.append(response)
    return responses
def magic_push_print(self, arg):
    """ Set numpy array printing options by pushing onto a stack. """
    try:
        import numpy
    except ImportError:
        raise UsageError("could not import numpy.")
    args = parse_argstring(magic_push_print, arg)
    kwds = {}
    if args.precision is not None:
        kwds['precision'] = args.precision
    if args.threshold is not None:
        if args.threshold == 0:
            args.threshold = sys.maxint
        kwds['threshold'] = args.threshold
    if args.edgeitems is not None:
        kwds['edgeitems'] = args.edgeitems
    if args.linewidth is not None:
        kwds['linewidth'] = args.linewidth
    if args.suppress is not None:
        kwds['suppress'] = args.suppress
    if args.nanstr is not None:
        kwds['nanstr'] = args.nanstr
    if args.infstr is not None:
        kwds['infstr'] = args.infstr
    old_options = numpy.get_printoptions()
    numpy.set_printoptions(**kwds)
    stack = getattr(self, '_numpy_printoptions_stack', [])
    stack.append(old_options)
    self._numpy_printoptions_stack = stack
    if not args.quiet:
        print_numpy_printoptions(numpy.get_printoptions())
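# A hedged usage sketch (option names inferred from the attributes read
# above; a threshold of 0 is mapped to sys.maxint, i.e. "never summarize"):
#
#   In [1]: %push_print --precision 3 --threshold 0
#
# Each call pushes the previous numpy print options onto
# self._numpy_printoptions_stack, so a matching pop magic can restore them.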
def include(self, line, cell):
    """ include section of the files in scpy2 folder """
    import json
    ip = get_ipython()
    args = parse_argstring(self.include, line)
    language = args.language
    filepath = args.path
    section = args.section
    run = args.run
    first_line = "%%include " + line
    section_text = get_section(filepath, section)
    if run:
        ip.run_cell(section_text)
    text = json.dumps(unicode(first_line) + u"\n" + section_text.decode("utf8"))
    code = """%%javascript
(function(pattern, text){{
    var cells = IPython.notebook.get_cells();
    for (var i = 0; i < cells.length; i++) {{
        var cell = cells[i];
        if (cell.get_text().indexOf(pattern) == 0){{
            cell.set_text(text);
        }}
    }}
}})({0}, {1});
""".format(json.dumps(first_line), text)
    ip.run_cell(code)
    from IPython import display
    display.clear_output()
def fortran_config(self, line):
    """
    View and handle the custom configuration for the %%fortran magic.

        %fortran_config

            Show the current custom configuration

        %fortran_config --defaults

            Delete the current configuration and return to the defaults

        %fortran_config <other options>

            Save <other options> to use with %%fortran
    """
    args = magic_arguments.parse_argstring(self.fortran_config, line)
    if args.defaults:
        try:
            del self.shell.db['fortran']
            print("Deleted custom config. Back to default arguments for %%fortran")
        except KeyError:
            print("No custom config found for %%fortran")
    elif not line:
        try:
            line = self.shell.db['fortran']
        except KeyError:
            print("No custom config found for %%fortran")
        else:
            print("Current default arguments for %%fortran:\n\t%s" % line)
    else:
        self.shell.db['fortran'] = line
        print("New default arguments for %%fortran:\n\t%s" % line)
def pythran(self, line, cell):
    """
    Compile and import everything from a Pythran code cell.

    %%pythran
    #pythran export foo(int)
    def foo(x):
        return x + x
    """
    args = magic_arguments.parse_argstring(self.pythran, line)
    kwargs = {}
    if args.D:
        kwargs['define_macros'] = args.D
    if args.O:
        kwargs.setdefault('extra_compile_args', []).extend(
            '-O' + str(x) for x in args.O)
    if args.march:
        kwargs.setdefault('extra_compile_args', []).extend(
            '-march=' + str(x) for x in args.march)
    if args.fopenmp:
        kwargs.setdefault('extra_compile_args', []).append('-fopenmp')
    m = hashlib.md5()
    m.update(cell)
    module_name = "pythranized_" + m.hexdigest()
    module_path = pythran.compile_pythrancode(module_name, cell, **kwargs)
    module = imp.load_dynamic(module_name, module_path)
    self._import_all(module)
def magic_print_methods(self, arg):
    """ Print the methods of an object or type. """
    args = parse_argstring(magic_print_methods, arg)
    obj = get_variable(self, args.variable)
    if not isinstance(obj, (type, types.ClassType)):
        klass = type(obj)
    else:
        klass = obj
    attrs = inspect.classify_class_attrs(klass)
    grouped = defaultdict(list)
    all_names = []
    for name, kind, defining, value in attrs:
        if kind not in ('method', 'class method', 'static method'):
            continue
        if args.private or not name.startswith('_'):
            grouped[defining].append(name)
            all_names.append(name)
    if args.group:
        for cls in inspect.getmro(klass)[::-1]:
            if grouped[cls]:
                name = getattr(cls, '__name__', repr(cls))
                print(name)
                print('-' * len(name))
                print(utils.columnize(grouped[cls]))
    else:
        print(utils.columnize(all_names))
def ghci(self, line, cell=None, local_ns=None):
    '''
    Execute code in Ghci, and pull some of the results back into the
    Python namespace.
    '''
    args = parse_argstring(self.ghci, line)
    if cell is None:
        code = ''
    else:
        code = cell
    try:
        text_output = self._ghci.run(code, verbose=args.verbose)
    except ghci2py.Ghci2PyError as exception:
        msg = exception.message
        raise GhciMagicError('Ghci could not complete execution: %s' % msg)
    key = 'GhciMagic.Ghci'
    display_data = []
    # Publish text output
    if text_output:
        display_data.append((key, {'text/plain': text_output}))
    for source, data in display_data:
        self._publish_display_data(source, data)
def unlink_ruleset(self, line):
    args = parse_argstring(self.unlink_ruleset, line)
    assoc = cnode.access_ruleset_assocs.filter_by(
        ruleset_name=args.name, ruletype=args.ruletype).scalar()
    if assoc is None:
        logg.warn("ruleset %s (%s) not used by current node, ignored",
                  args.name, args.ruletype)
    else:
        cnode.access_ruleset_assocs.remove(assoc)
def sparql(self, line, cell=None):
    arg = parse_argstring(self.sparql, line)
    if cell is not None:
        sources, cell = extract_froms(cell)
        source = self.shell.user_ns.get(arg.source, arg.source)
        sources.extend(self._parse_source(source))
        if arg.model is None and len(sources) == 0:
            raise UsageError("Please specify a source to query against.")
        model = self._get_model(arg.model)
        for source in sources:
            if source.startswith("tracker:"):
                # NotImplemented is a constant, not an exception type.
                raise NotImplementedError("Tracker queries not implemented yet")
            else:
                load_source(model, source)
        body = prepare_query(cell)
        query = RDF.SPARQLQuery(body)
        results = LibRdfResults(query.execute(model))
        if arg.count:
            print("Found {0} rows.".format(len(results)))
        if arg.output is None:
            return results
        else:
            self.shell.user_ns[arg.output] = results
def result(self, line=''):
    """Print the result of the last asynchronous %px command.

    This lets you recall the results of %px computations after
    asynchronous submission (block=False).

    Examples
    --------
    ::

        In [23]: %px os.getpid()
        Async parallel execution on engine(s): all

        In [24]: %pxresult
        Out[8:10]: 60920
        Out[9:10]: 60921
        Out[10:10]: 60922
        Out[11:10]: 60923
    """
    args = magic_arguments.parse_argstring(self.result, line)
    if self.last_result is None:
        raise UsageError(NO_LAST_RESULT)
    self.last_result.get()
    self.last_result.display_outputs(groupby=args.groupby)
def Rget(self, line):
    '''
    Return an object from rpy2, possibly as a structured array (if possible).
    Similar to Rpull except only one argument is accepted and the value is
    returned rather than pushed to self.shell.user_ns::

        In [3]: dtype=[('x', '<i4'), ('y', '<f8'), ('z', '|S1')]

        In [4]: datapy = np.array([(1, 2.9, 'a'), (2, 3.5, 'b'), (3, 2.1, 'c'), (4, 5, 'e')], dtype=dtype)

        In [5]: %R -i datapy

        In [6]: %Rget datapy
        Out[6]:
        array([['1', '2', '3', '4'],
               ['2', '3', '2', '5'],
               ['a', 'b', 'c', 'e']],
              dtype='|S1')

        In [7]: %Rget -d datapy
        Out[7]:
        array([(1, 2.9, 'a'), (2, 3.5, 'b'), (3, 2.1, 'c'), (4, 5.0, 'e')],
              dtype=[('x', '<i4'), ('y', '<f8'), ('z', '|S1')])
    '''
    args = parse_argstring(self.Rget, line)
    output = args.output
    return self.Rconverter(self.r(output[0]), dataframe=args.as_dataframe)
def notebook(self, s):
    """Export and convert IPython notebooks.

    This function can export the current IPython history to a notebook file.
    For example, to export the history to "foo.ipynb" do "%notebook foo.ipynb".

    The -e or --export flag is deprecated in IPython 5.2, and will be
    removed in the future.
    """
    args = magic_arguments.parse_argstring(self.notebook, s)
    from nbformat import write, v4
    cells = []
    hist = list(self.shell.history_manager.get_range())
    if len(hist) <= 1:
        raise ValueError('History is empty, cannot export')
    for session, execution_count, source in hist[:-1]:
        cells.append(v4.new_code_cell(
            execution_count=execution_count,
            source=source
        ))
    nb = v4.new_notebook(cells=cells)
    with io.open(args.filename, 'w', encoding='utf-8') as f:
        write(nb, f, version=4)
def matplotlib(self, line=''):
    """Set up matplotlib to work interactively.

    This function lets you activate matplotlib interactive support
    at any point during an IPython session. It does not import anything
    into the interactive namespace.

    If you are using the inline matplotlib backend for embedded figures,
    you can adjust its behavior via the %config magic::

        # enable SVG figures, necessary for SVG+XHTML export in the qtconsole
        In [1]: %config InlineBackend.figure_format = 'svg'

        # change the behavior of closing all figures at the end of each
        # execution (cell), or allowing reuse of active figures across
        # cells:
        In [2]: %config InlineBackend.close_figures = False

    Examples
    --------
    In this case, where the MPL default is TkAgg::

        In [2]: %matplotlib
        Using matplotlib backend: TkAgg

    But you can explicitly request a different backend::

        In [3]: %matplotlib qt
    """
    args = magic_arguments.parse_argstring(self.matplotlib, line)
    gui, backend = self.shell.enable_matplotlib(args.gui)
    self._show_matplotlib_backend(args.gui, backend)
def jade(self, line, cell):
    line = line.strip()
    args = magic_arguments.parse_argstring(self.jade, line)
    # Register CodeMirror highlighting for %%jade cells in the notebook.
    display(Javascript(
        """
        require(
            ["notebook/js/codecell", "codemirror/mode/jade/jade"],
            function(cc){
                cc.CodeCell.options_default.highlight_modes.magic_jade = {
                    reg: ["^%%jade"]
                }
            }
        );
        """))
    try:
        val = pyjade.simple_convert(cell)
    except Exception as err:
        print(err)
        return
    if args.var_name is not None:
        get_ipython().user_ns[args.var_name] = val
    else:
        return HTML(val)
def wrf(self, line, cell='', va=0x500000):
    """Write hex words into the RAM overlay region, then instantly move
    the overlay into place. It's a sneaky trick that looks like a
    temporary way to write to Flash.

    For example, this patches the signature as it appears in the current
    version of the Backdoor patch itself. Normally this can't be modified,
    since it's in flash:

        : rd c9720 50
        000c9720  ac 42 4c 58 ac 6c 6f 63 ac 65 65 42 ac 6f 6b 42  .BLX.loc.eeB.okB
        000c9730  e6 0c 00 02 a8 00 04 04 c0 46 c0 46 c0 46 c0 46  .........F.F.F.F
        000c9740  7e 4d 65 53 60 31 34 20 76 2e 30 32 20 20 20 20  ~MeS`14 v.02
        000c9750  53 1c 0b 60 16 70 0a 68 53 1c 0b 60 16 70 0a 68  S..`.p.hS..`.p.h
        000c9760  53 1c 0b 60 16 70 0a 68 53 1c 0b 60 16 70 29 88  S..`.p.hS..`.p).

        : wrf c9740 55555555

        : rd c9720 50
        000c9720  ac 42 4c 58 ac 6c 6f 63 ac 65 65 42 ac 6f 6b 42  .BLX.loc.eeB.okB
        000c9730  e6 0c 00 02 a8 00 04 04 c0 46 c0 46 c0 46 c0 46  .........F.F.F.F
        000c9740  55 55 55 55 60 31 34 20 76 2e 30 32 20 20 20 20  UUUU`14 v.02
        000c9750  53 1c 0b 60 16 70 0a 68 53 1c 0b 60 16 70 0a 68  S..`.p.hS..`.p.h
        000c9760  53 1c 0b 60 16 70 0a 68 53 1c 0b 60 16 70 29 88  S..`.p.hS..`.p).

        : sc c ac
        00000000  55 55 55 55 60 31 34 20 76 2e 30 32              UUUU`14 v.02
    """
    # Reuses the argument spec of the wr magic, which takes the same options.
    args = parse_argstring(self.wr, line)
    d = self.shell.user_ns['d']
    args.word.extend(map(hexint, cell.split()))
    overlay_set(d, va, len(args.word))
    poke_words(d, va, args.word)
    overlay_set(d, args.address, len(args.word))
def debug(self, line='', cell=None):
    """Activate the interactive debugger.

    This magic command supports two ways of activating the debugger.
    One is to activate the debugger before executing code. This way, you
    can set a breakpoint and step through the code from that point. You
    can use this mode by giving statements to execute and optionally a
    breakpoint.

    The other one is to activate the debugger in post-mortem mode. You can
    activate this mode simply by running %debug without any argument.
    If an exception has just occurred, this lets you inspect its stack
    frames interactively. Note that this will always work only on the last
    traceback that occurred, so you must call this quickly after an
    exception that you wish to inspect has fired, because if another one
    occurs, it clobbers the previous one.

    If you want IPython to automatically do this on every exception, see
    the %pdb magic for more details.
    """
    args = magic_arguments.parse_argstring(self.debug, line)
    if not (args.breakpoint or args.statement or cell):
        self._debug_post_mortem()
    else:
        code = "\n".join(args.statement)
        if cell:
            code += "\n" + cell
        self._debug_exec(code, args.breakpoint)
def jsmva(self, line):
    from JPyInterface import functions
    args = parse_argstring(self.jsmva, line)
    if args.arg == 'on':
        functions.register()
    elif args.arg == 'off':
        functions.unregister()
def cell_px(self, line='', cell=None):
    """Executes the cell in parallel.

    Examples
    --------
    ::

        In [24]: %%px --noblock
           ....: a = os.getpid()
        Async parallel execution on engine(s): all

        In [25]: %%px
           ....: print a
        [stdout:0] 1234
        [stdout:1] 1235
        [stdout:2] 1236
        [stdout:3] 1237
    """
    args = magic_arguments.parse_argstring(self.cell_px, line)
    if args.targets:
        save_targets = self.view.targets
        self.view.targets = self._eval_target_str(args.targets)
    try:
        return self.parallel_execute(cell, block=args.block,
                                     groupby=args.groupby,
                                     save_name=args.save_name,
                                     )
    finally:
        if args.targets:
            self.view.targets = save_targets
def reset(self, line=''):
    """Reset and reopen the USB interface."""
    args = parse_argstring(self.reset, line)
    d = self.shell.user_ns['d_remote']
    d.reset()
    if args.arm:
        reset_arm(d)
def magic_print_traits(self, arg):
    """ Print the traits of an object. """
    try:
        from IPython.external.pretty import pretty
    except ImportError:
        import pprint
        pretty = pprint.pformat
    args = parse_argstring(magic_print_traits, arg)
    obj = get_variable(self, args.variable)
    if not hasattr(obj, 'trait_names'):
        raise UsageError('variable %r is not a HasTraits instance' % args.variable)
    from traits.has_traits import not_event
    from traits.trait_errors import TraitError
    names = obj.trait_names(type=not_event)
    names.sort()
    key_values = []
    for name in names:
        try:
            value = getattr(obj, name)
        except (AttributeError, TraitError):
            pvalue = '<undefined>'
        else:
            pvalue = pretty(value)
        key_values.append((name, pvalue))
def initprov(self, line, cell):
    args = magic_arguments.parse_argstring(self.prov, line)
    rcode = """from rdflib import Graph, Namespace, URIRef, Literal
from rdflib.namespace import DC, NamespaceManager
import operators
if '__NAMESPACE__' not in globals():
    __NAMESPACE__ = 'http://example.org/'
__OBJDICT__ = {}
__URIDICT__ = {}
__TEXTDICT__ = {}
__PROV__ = Graph()
pub = Namespace("http://orion.tw.rpi.edu/~fulinyun/ontology/prov-pub/")
prov = Namespace("http://www.w3.org/ns/prov#")
example = Namespace(__NAMESPACE__)
namespace_manager = NamespaceManager(Graph())
namespace_manager.bind('pub', pub, override=False)
namespace_manager.bind('prov', prov, override=False)
namespace_manager.bind('dc', DC, override=False)
namespace_manager.bind('', example, override=False)
__PROV__.namespace_manager = namespace_manager
"""
    if args.verbose:
        print("Actual code executed:\n" + rcode)
    get_ipython().run_cell(rcode)
    self.prov(line, cell)
def cut(self, line, cell):
    from IPython.core.getipython import get_ipython
    from sys import stdout
    args = parse_argstring(self.cut, line)
    max_lines = args.lines
    max_chars = args.chars
    counters = dict(chars=0, lines=0)

    def write(string):
        counters["lines"] += string.count("\n")
        counters["chars"] += len(string)
        if counters["lines"] >= max_lines:
            raise IOError("Too many lines")
        elif counters["chars"] >= max_chars:
            raise IOError("Too many characters")
        else:
            old_write(string)

    try:
        old_write, stdout.write = stdout.write, write
        ipython = get_ipython()
        ipython.run_cell(cell)
    finally:
        del stdout.write
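# A hedged usage sketch (flag names inferred from args.lines/args.chars):
#
#   In [1]: %%cut --lines 100 --chars 10000
#      ...: for i in range(10**6):
#      ...:     print(i)
#   IOError: Too many lines
#
# Output beyond either limit aborts the cell instead of flooding the frontend.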
def warpscript(self, line, cell=None, local_ns=None):
    args = parse_argstring(self.warpscript, line)
    if args.url is not None:
        if len(args.url[0]) > 1:
            warnings.warn("\nOnly one url accepted", UserWarning)
        url = unicode_to_str(args.url[0][0])
    # The original snippet referenced undefined names `code` and `silent`;
    # the assumptions below (cell body is the script, output not suppressed)
    # fill those gaps.
    code = cell if cell is not None else ''
    silent = False
    binary_code = code.encode('UTF-8')
    req = urllib2.Request(url, binary_code)
    rsp = urllib2.urlopen(req)
    content = rsp.read()
    if not silent:
        str_response = content.decode('UTF-8')
        obj = json.loads(str_response)
        stream_content = {'name': 'stdout', 'text': content.decode('UTF-8')}
        self.send_response(self.iopub_socket, 'stream', stream_content)
    return {'status': 'ok',
            # The base class increments the execution count
            'execution_count': self.execution_count,
            'payload': [],
            'user_expressions': {},
            }
def magic_push_err(self, arg):
    """ Set numpy numerical error handling via a stack. """
    try:
        import numpy
    except ImportError:
        raise UsageError("could not import numpy.")
    sentinel = object()
    args = parse_argstring(magic_push_err, arg)
    kwds = {}
    errcall = sentinel
    for key in ['all', 'divide', 'over', 'under', 'invalid']:
        value = getattr(args, key)
        if value is not None:
            kwds[key] = value
    if args.call_func is not None:
        if args.no_call_func:
            raise UsageError("You cannot specify both a --call-func and "
                             "--no-call-func at the same time.")
        global_ns = get_shell(self).user_global_ns
        local_ns = get_shell(self).user_ns
        try:
            errcall = eval(args.call_func, global_ns, local_ns)
        except Exception as e:
            raise UsageError('Could not find function %r.\n%s: %s'
                             % (args.call_func, e.__class__.__name__, e))
def sc(self, line, cell=''):
    """Send a low-level SCSI command with a 12-byte CDB"""
    args = parse_argstring(self.sc, line)
    d = self.shell.user_ns['d']
    cdb = ''.join(map(chr, args.cdb))
    data = scsi_in(d, cdb, args.len)
    self.shell.write(hexdump(data))
def django_settings(self, line, cell=""):
    """
    Show and set up the django settings to use with %%django_orm

        %django_settings

            Load the current custom configuration in a new cell

        %django_settings --default

            Delete the current configuration and go back to the default

        %%django_settings
        DATABASES = {
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': 'db.sqlite'     # ':memory:'
            }
        }
        INSTALLED_APPS = ("orm_magic",)
    """
    args = magic_arguments.parse_argstring(self.django_settings, line)
    if args.default:
        try:
            del self.shell.db['django_orm']
            print("Deleted custom settings. Back to default for %%django_orm")
        except KeyError:
            print("No custom settings found for %%django_orm")
    elif not line and not cell:
        self.shell.set_next_input("%%django_settings\n" + self.get_settings())
    else:
        self.shell.db['django_orm'] = cell
        print("Settings for %%django_orm configured successfully")
def bic(self, line, cell=''):
    """Read/modify/write hex words into ARM memory, [mem] &= ~arg"""
    args = parse_argstring(self.bic, line)
    d = self.shell.user_ns['d']
    args.word.extend(map(hexint, cell.split()))
    for i, w in enumerate(args.word):
        poke_bic(d, args.address + i*4, w)
def ferret_getdata(self, line):
    '''
    Line-level magic to get data from ferret.

        In [18]: %%ferret
           ....: use levitus_climatology

        In [19]: %ferret_getdata tempdict = temp
           ....: Message: tempdict is now available in python as a dictionary containing the variable's metadata and data array.

        In [20]: print tempdict.keys()
           ....: ['axis_coords', 'axis_types', 'data_unit', 'axis_units', 'title', 'axis_names', 'missing_value', 'data']
    '''
    args = parse_argstring(self.ferret_getdata, line)
    code = unicode_to_str(''.join(args.code))
    pythonvariable = code.split('=')[0]
    ferretvariable = code.split('=')[1]
    exec('%s = pyferret.getdata("%s", %s)'
         % (pythonvariable, ferretvariable, args.create_mask))
    self.shell.push("%s" % pythonvariable)
    publish_display_data('ferretMagic.ferret', {'text/html':
        '<pre style="background-color:#F2F5A9; border-radius: 4px 4px 4px 4px; font-size: smaller">' +
        'Message: ' + pythonvariable +
        " is now available in python as a dictionary containing the variable's metadata and data array." +
        '</pre>'
    })
def ferret_putdata(self, line):
    '''
    Line-level magic to put data to ferret.

        In [31]: import numpy as np
           ....: b = {}
           ....: b['name']='myvar'
           ....: x=np.linspace(-np.pi*4, np.pi*4, 500)
           ....: b['data']=np.sin(x)/x
           ....: b.keys()
        Out[31]: ['data', 'name']

        In [32]: %ferret_putdata --axis_pos (1,0,2,3,4,5) b
           ....: Message: b is now available in ferret as myvar
    '''
    args = parse_argstring(self.ferret_putdata, line)
    ferretvariable = unicode_to_str(args.code[0])
    if args.axis_pos:
        axis_pos_variable = eval(args.axis_pos)
    else:
        axis_pos_variable = None
    pyferret.putdata(self.shell.user_ns[ferretvariable], axis_pos=axis_pos_variable)
    publish_display_data('ferretMagic.ferret', {'text/html':
        '<pre style="background-color:#F2F5A9; border-radius: 4px 4px 4px 4px; font-size: smaller">' +
        'Message: ' + ferretvariable + ' is now available in ferret as ' +
        self.shell.user_ns[ferretvariable]['name'] +
        '</pre>'
    })
def writefile(self, line, cell):
    """Write the contents of the cell to a file.

    The file will be overwritten unless the -a (--append) flag is specified.
    """
    args = magic_arguments.parse_argstring(self.writefile, line)
    filename = os.path.expanduser(unquote_filename(args.filename))
    if os.path.exists(filename):
        if args.append:
            print("Appending to %s" % filename)
        else:
            print("Overwriting %s" % filename)
    else:
        print("Writing %s" % filename)
    mode = 'a' if args.append else 'w'
    with io.open(filename, mode, encoding='utf-8') as f:
        f.write(cell)
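# Usage, mirroring IPython's built-in %%writefile cell magic:
#
#   In [1]: %%writefile -a notes.txt
#      ...: one more line
#   Appending to notes.txt
#
# Without -a an existing file is overwritten ("Overwriting notes.txt").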
def _get_import_cache(ipython):
    """ Load a mapping of names to import statements from the IPython history. """
    import_cache = {}

    def _format_alias(alias):
        return ("import {0.name} as {0.asname}" if alias.asname
                else "import {0.name}").format(alias)

    class Visitor(ast.NodeVisitor):
        def visit_Import(self, node):
            for alias in node.names:
                (import_cache.setdefault(alias.asname or alias.name, set())
                 .add(_format_alias(alias)))

        def visit_ImportFrom(self, node):
            if node.level:  # Skip relative imports.
                return
            for alias in node.names:
                (import_cache.setdefault(alias.asname or alias.name, set())
                 .add("from {} {}".format(node.module, _format_alias(alias))))

    for _, _, entry in (
            ipython.history_manager.get_tail(
                ipython.history_load_length, raw=True)):
        if entry.startswith("%autoimport"):
            try:
                args = parse_argstring(autoimport, entry[len("%autoimport"):])
                if args.clear:
                    import_cache.pop(args.clear, None)
            except UsageError:
                pass
        else:
            try:
                parsed = ast.parse(entry)
            except SyntaxError:
                continue
            Visitor().visit(parsed)
    return import_cache
def cypher(self, line, cell=''):
    '''Run cypher commands.'''
    args = parse_argstring(self.cypher, line)
    if args.variable:
        cell = self.shell.user_ns[args.variable]
    pwd = DEFAULT_PWD if args.password is None else args.password
    output_type = args.output
    if args.reset:
        self.graph = None
        print("Neo4j database connection reset...")
        return
    if self.graph is None or args.password is not None:
        print(f'Accessing graph database with password: {pwd}')
        self.graph = Graph(password=pwd)
    if args.quiet or not cell:
        return
    _response = self.graph.run(cell)
    if output_type is None:
        response = _response.to_data_frame()
    elif output_type == 'matrix':
        try:
            import sympy  # py2neo's to_matrix() requires sympy
            response = _response.to_matrix()
        except ModuleNotFoundError:
            warn("You need to install sympy to return a matrix.")
            response = None
    elif output_type == 'table':
        response = _response.to_table()
    else:
        response = _response.to_data_frame()
    return response

# ip = get_ipython()
# ip.register_magics(CypherMagic)
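# A hedged usage sketch (flag names inferred from the attributes read above;
# the password and query are hypothetical):
#
#   In [1]: %%cypher --password secret
#      ...: MATCH (n) RETURN n LIMIT 5
#   Out[1]: <pandas DataFrame built via py2neo's to_data_frame()>
#
#   In [2]: %cypher --reset      # drops the cached Graph connection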
def execute(self, arg_string, content_string=None):
    args = parse_argstring(self.execute, arg_string)
    job_filter = None
    if args.my:
        job_filter = "submitter eq '%s'" % (self._adla_service.logged_in_user)
    self._write_line("Listing azure data lake jobs under account '%s'..." % (args.account))
    jobs = self._adla_service.retrieve_jobs(args.account, job_filter,
                                            args.page_index, args.page_job_number)
    html = "<table>"
    html += "  <caption>%d datalake analytics job(s) listed</caption>" % (len(jobs))
    html += "  <thead>"
    html += "    <tr>"
    html += "      <th>Job Name</th>"
    html += "      <th>Submitter</th>"
    html += "      <th>State</th>"
    html += "      <th>Result</th>"
    html += "      <th></th>"
    html += "    </tr>"
    html += "  </thead>"
    html += "  <tbody>"
    for job in jobs:
        job_details = "Parallelism: %d%s" % (job.parallelism, linesep)
        job_details += "Priority: %d%s" % (job.priority, linesep)
        job_details += "Submit Time: %s%s" % (str(job.submit_time), linesep)
        job_details += "Start Time: %s%s" % (str(job.start_time), linesep)
        job_details += "End Time: %s%s" % (str(job.end_time), linesep)
        html += "    <tr title='%s'>" % (job_details)
        html += "      <td>%s</td>" % (job.name)
        html += "      <td>%s</td>" % (job.submitter)
        html += "      <td>%s</td>" % (job.state)
        html += "      <td>%s</td>" % (job.result)
        html += "      <td><a href='https://%s.azuredatalakeanalytics.net/jobLink/%s' target='_blank'>View Job</a></td>" % (args.account, job.id)
        html += "    </tr>"
    html += "  </tbody>"
    html += "</table>"
    self._write_html(html)
    return self._convert_to_df(jobs)
def Rpull(self, line):
    '''
    A line-level magic for R that pulls variables from python to rpy2::

        In [18]: _ = %R x = c(3,4,6.7); y = c(4,6,7); z = c('a',3,4)

        In [19]: %Rpull x y z

        In [20]: x
        Out[20]: array([ 3. ,  4. ,  6.7])

        In [21]: y
        Out[21]: array([ 4.,  6.,  7.])

        In [22]: z
        Out[22]: array(['a', '3', '4'], dtype='|S1')

    If --as_dataframe, then each object is returned as a structured array
    after first being passed through "as.data.frame" in R, before calling
    self.Rconverter. This is useful when a structured array is desired as
    output, or when the object in R has mixed data types.
    See the %%R docstring for more examples.

    Notes
    -----
    Beware that R names can have '.' so this is not fool proof.
    To avoid this, don't name your R objects with '.'s...
    '''
    args = parse_argstring(self.Rpull, line)
    outputs = args.outputs
    for output in outputs:
        self.shell.push({
            output: self.Rconverter(self.r(output), dataframe=args.as_dataframe)
        })
def setup_logging(self, line):
    if self.logger_initialized:
        logging.getLogger('GPUSimulators').info("Global logger already initialized!")
        return
    else:
        self.logger_initialized = True
        args = magic_arguments.parse_argstring(self.setup_logging, line)
        import sys

        # Get root logger
        logger = logging.getLogger('GPUSimulators')
        logger.setLevel(min(args.level, args.file_level))

        # Add log to screen
        ch = logging.StreamHandler()
        ch.setLevel(args.level)
        logger.addHandler(ch)
        logger.log(args.level, "Console logger using level %s",
                   logging.getLevelName(args.level))

        # Get the outfilename (try to evaluate if Python expression...)
        try:
            outfile = eval(args.out, self.shell.user_global_ns, self.shell.user_ns)
        except Exception:
            outfile = args.out

        # Add log to file
        logger.log(args.level, "File logger using level %s to %s",
                   logging.getLevelName(args.file_level), outfile)
        fh = logging.FileHandler(outfile)
        formatter = logging.Formatter('%(asctime)s:%(name)s:%(levelname)s: %(message)s')
        fh.setFormatter(formatter)
        fh.setLevel(args.file_level)
        logger.addHandler(fh)

        logger.info("Python version %s", sys.version)
        self.shell.user_ns[args.name] = logger
def sim_magic_imports(self, line, cell=None):
    "Send code to simulator with imports and common definitions."
    args = magic_arguments.parse_argstring(self.sim_magic_imports, line)
    preload = """#---- sim_magic_imports BOILERPLATE ----
from ev3dev2.motor import MoveTank, MoveSteering, SpeedPercent, OUTPUT_B, OUTPUT_C
from ev3dev2.sensor import INPUT_1, INPUT_2, INPUT_3, INPUT_4
from ev3dev2.sensor.lego import ColorSensor, GyroSensor, UltrasonicSensor
from ev3dev2.sound import Sound
from ev3dev2_glue import get_clock

#----- YOUR CODE BELOW HERE -----

"""
    if args.previewcode:
        return preload
    elif not cell and not args.preview:
        return
    elif args.preview and cell is None:
        # print(preload)
        display(HTML(highlight(preload, PythonLexer(), HtmlFormatter())))
        return
    try:
        cell = preload + cell
        self.shell.user_ns[args.sim].set_element("prog", cell)
        self.updateCode(args.sim)
        self.handle_args(args)
    except Exception:
        print(f"There seems to be a problem... Is {args.sim} defined?")
        return
    if not args.quiet:
        self.download_ping(args.sim)
    # self.give_focus_to_run()
    if args.autorun:
        self.check_element(args.sim, args.autorun, "roboSim-display-runstop")
    if args.preview:
        # print(cell)
        display(HTML(highlight(cell, PythonLexer(), HtmlFormatter())))
def bash2(self, line='', cell=None):
    ''' Wrapper for bash magic '''
    # Parse args
    args = parse_argstring(self.bash2, line)
    # print(args)

    # Change to new working directory
    try:
        if args.dir is not None:
            if args.dir[0] == "~":
                args.dir = args.dir.replace("~", os.path.expanduser("~"))
            os.chdir(args.dir)
    except Exception as e:
        print(e)
    else:
        # Instantiate original bash magic
        newscript = script.ScriptMagics
        newscript.shell = self.shell

        # Add oneliner to end of cell
        pwdcmd = '\necho :pwd:`pwd` >&2'
        newcell = cell + pwdcmd

        # Call script magic for bash (the class itself stands in for self here)
        newscript.shebang(newscript, line='bash --err reterr', cell=newcell)

        # Extract new directory
        err = newscript.shell.user_ns['reterr']
        newdir = err.split(':pwd:')[-1].strip()

        # Change into new directory
        try:
            os.chdir(newdir)
        except Exception as e:
            print(e)
    return
def cache(self, line, cell):
    """Cache user variables in a file, and skip the cell if the cached
    variables exist.

    Usage:

        %%cache myfile.pkl var1 var2
        # If myfile.pkl doesn't exist, this cell is executed and
        # var1 and var2 are saved in this file.
        # Otherwise, the cell is skipped and these variables are
        # injected from the file to the interactive namespace.
        var1 = ...
        var2 = ...
    """
    ip = self.shell
    args = magic_arguments.parse_argstring(self.cache, line)
    code = cell if cell.endswith('\n') else cell + '\n'
    vars = clean_vars(args.vars)
    path = conditional_eval(args.to[0], ip.user_ns)
    cachedir_from_path = os.path.split(path)[0]
    # The cachedir can be specified with --cachedir or inferred from the
    # path or in ipython_config.py
    cachedir = args.cachedir or cachedir_from_path or self.cachedir
    # If path is relative, use the user-specified cache cachedir.
    if not os.path.isabs(path) and cachedir:
        # Try to create the cachedir if it does not already exist.
        if not os.path.exists(cachedir):
            try:
                os.mkdir(cachedir)
                print("[Created cachedir '{0:s}'.]".format(cachedir))
            except OSError:
                pass
        path = os.path.join(cachedir, path)
    cache(cell, path, vars=vars,
          force=args.force, verbose=not args.silent, read=args.read,
          # IPython methods
          ip_user_ns=ip.user_ns,
          ip_run_cell=ip.run_cell,
          ip_push=ip.push,
          ip_clear_output=clear_output)
def check_login(self, line):
    from core.auth import authenticate_user_credentials
    args = parse_argstring(self.check_login, line)
    if args.password is None:
        pw = getpass.getpass()
    else:
        pw = args.password
    user = authenticate_user_credentials(args.login_name, pw, {})
    if user is None:
        user = q(User).filter_by(login_name=args.login_name).scalar()
        if user is None:
            print(u"login failed, no user with login name '{}'!".format(args.login_name))
        else:
            print(u"user with login_name '{}' found, but login failed!".format(args.login_name))
    else:
        print(u"login ok")
    return user
def bgwait(arg):
    """Wait for jobs to finish, up to timeout time"""
    if ext_state != "enabled":
        return
    if not sum(map(len, jobs.values())):
        print("No background jobs")
        return
    args = parse_argstring(bgwait, arg)
    timeout: int = args.timeout
    timeout = min(timeout, max_timeout_secs)
    start = time.time()
    now = time.time()
    while now - start < timeout:
        if not sum(map(len, jobs.values())):
            return
        time.sleep(timeout_step_secs)
        now = time.time()
    print("timeout reached")
def scrapy_attach(self, arg):
    try:
        args = parse_argstring(self.scrapy_attach, arg)
        host, port = args.host, args.port
        assert 1 <= port <= 65535  # valid TCP port range
        try:
            conn = get_rpyc_connection(host, port, debug)
        except socket.gaierror:
            print_err("Wrong host: " + host)
            return
        except socket.error:
            print_err("Connection failure")
            return
        side = RemoteScrapy(self.shell, conn)
        self.add_new_scrapy_side(side)
        return side
    except Exception as exc:
        print_err(exc, debug=debug)
def heat(self, line, cell):
    """Method to profile the python code in the ipython cell and display it
    as a heatmap using the py-heat package.

    :param line: Line value for the ipython line this magic is called from.
    :param cell: Cell value for the ipython cell this magic is called from.
    """
    args = magic_arguments.parse_argstring(self.heat, line)
    filename = args.out
    if filename is not None:
        filename = os.path.expanduser(args.out)
    tmp_file = 'ipython_cell_input.py'
    with open(tmp_file, 'wb') as f:
        f.write(cell.encode())
    pyheat = PyHeat(tmp_file)
    pyheat.create_heatmap()
    pyheat.show_heatmap(output_file=filename)
    pyheat.close_heatmap()
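# A hedged usage sketch (the --out flag is inferred from args.out; the file
# name is hypothetical):
#
#   In [1]: %%heat --out profile.png
#      ...: total = 0
#      ...: for i in range(10000):
#      ...:     total += i ** 2
#
# The cell body is written to ipython_cell_input.py, profiled line by line
# by py-heat, and rendered as a heatmap (saved to profile.png when --out is given).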
def create_ruleset_for_group(self, line):
    args = parse_argstring(self.create_ruleset_for_group, line)
    group_name = args.group_name
    usergroup = q(UserGroup).filter_by(name=group_name).scalar()
    if usergroup is None:
        print("usergroup {} does not exist, doing nothing!".format(group_name))
        return
    existing_ruleset = q(AccessRuleset).filter_by(name=group_name).scalar()
    if existing_ruleset is not None:
        print("ruleset with the name of the usergroup '{}' already exists, doing nothing!"
              .format(group_name))
        return
    rule = get_or_add_access_rule(group_ids=[usergroup.id])
    ruleset = AccessRuleset(name=group_name, description=group_name)
    arr = AccessRulesetToRule(rule=rule)
    ruleset.rule_assocs.append(arr)
def checkmask(self, line):
    args = parse_argstring(self.checkmask, line)
    import schema.schema as metadatatypes
    if args.all:
        for mdt in q(Node).filter_by(type="metadatatype"):
            print("=" * 80)
            print("checking metadatatype", mdt.name)
            for mask in mdt.children.filter_by(type="mask"):
                print("-" * 80)
                print("checking mask {} of mdt {}".format(mask.name, mdt.name))
                metadatatypes.checkMask(mask.m, fix=args.fix, verbose=1, show_unused=1)
    else:
        metadatatypes.checkMask(cnode.m, fix=args.fix, verbose=1, show_unused=1)
def unittest(self, line, cell):
    """Create test case from functions

    Parameters
        -p (--previous) P: Set cursor to P cells before
        -s (--stream) S:   Set output stream (default: sys.stdout)
        -t (--testcase):   Define TestCase name (default: JupyterTest)
        -u (--unparse):    Show TestCase source code

        In [1]: %%unittest -t JupyterTestCase
           ...: assert 1 + 1 == 2
           ...: 'other test'
           ...: assert 1 + 1 != 3
        Out[1]: <unittest.runner.TextTestResult run=2 errors=0 failures=0>
    """
    args = parse_argstring(self.unittest, line)
    tree = ast.parse(cell)
    tree = TransformAssert().visit(tree)
    tree = TransformFunction(args.testcase).visit(tree)
    return self.run_tests(get_ipython(), args, tree)
def execute(self, arg_string, content_string=None):
    args = parse_argstring(self.execute, arg_string)
    self._write_line("Viewing azure data lake job by id '%s' under account '%s'..."
                     % (args.job_id, args.account))
    job = self._adla_service.retrieve_job(args.account, args.job_id)
    self._write_line("Azure data lake job info:")
    self._write_line("\tName: %s" % (job.name))
    self._write_line("\tType: %s" % (job.type))
    self._write_line("\tSubmitter: %s" % (job.submitter))
    self._write_line("\tParallelism: %d" % (job.parallelism))
    self._write_line("\tPriority: %d" % (job.priority))
    self._write_line("\tSubmit time: %s" % (str(job.submit_time)))
    self._write_line("\tStart time: %s" % (str(job.start_time)))
    self._write_line("\tEnd time: %s" % (str(job.end_time)))
    self._write_line("\tState: %s" % (job.state))
    self._write_line("\tResult: %s" % (job.result))
    return self._convert_to_df([job])
def list_container(self, line):
    args = magic_arguments.parse_argstring(self.list_container, line)
    if not args.i:
        raise UsageError('-i option is mandatory for listing')
    if not args.o:
        raise UsageError('-o option is mandatory for listing')
    if not args.o[0].startswith(tuple(string.ascii_letters)):
        raise UsageError('The output variable name must be a valid prefix '
                         'of a python variable, that is, start with a '
                         'letter')
    # Get the objects
    conn = get_swift_connection()
    _, objects = conn.get_container(args.i, full_listing=True)
    # Populate the returned list
    obj_names = []
    for obj_dict in objects:
        obj_names.append(obj_dict['name'])
    self.shell.user_ns[args.o] = obj_names
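# A hedged usage sketch (the container and variable names are hypothetical):
#
#   In [1]: %list_container -i mycontainer -o names
#   In [2]: names
#   Out[2]: ['obj1', 'obj2']   # object names from the Swift container listing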
def put(self, line):
    args = magic_arguments.parse_argstring(self.put, line)
    if not args.o:
        raise UsageError('-o option is mandatory for the invocation')
    if not args.o[0].startswith(tuple(string.ascii_letters)):
        raise UsageError('The output variable name must be a valid prefix '
                         'of a python variable, that is, start with a '
                         'letter')
    if not args.storlet:
        raise UsageError('--storlet option is mandatory '
                         'for the invocation')
    if not args.input:
        raise UsageError('--input option is mandatory for the invocation')
    if not args.input.startswith('/'):
        raise UsageError('--input argument must be a full path')
    if not args.output:
        raise UsageError('--output option is mandatory for the invocation')
    dst_container, dst_obj = self._parse_input_path(args.output)
    headers = {'X-Run-Storlet': '%s' % args.storlet}
    headers.update(self._generate_params_headers(args))
    # invoke storlet app on copy
    conn = get_swift_connection()
    response_dict = dict()
    with open(args.input, 'r') as content:
        # swiftclient's put_object takes (container, obj, contents, ...);
        # status and headers are reported back through response_dict.
        conn.put_object(dst_container, dst_obj, content,
                        headers=headers,
                        response_dict=response_dict)
    res = dict()
    res['headers'] = response_dict['headers']
    res['status'] = response_dict['status']
    print('Invocation Complete')
    self.shell.user_ns[args.o] = res
def put(self, line):
    args = magic_arguments.parse_argstring(self.put, line)
    if not args.o:
        raise UsageError('-o option is mandatory for the invocation')
    if not args.o[0].startswith(tuple(string.ascii_letters)):
        raise UsageError('The output variable name must be a valid prefix '
                         'of a python variable, that is, start with a '
                         'letter')
    if not args.storlet:
        raise UsageError('--storlet option is mandatory '
                         'for the invocation')
    if not args.input:
        raise UsageError('--input option is mandatory for the invocation')
    if not args.input.startswith('/'):
        raise UsageError('--input argument must be a full path')
    if not args.output:
        raise UsageError('--output option is mandatory for the invocation')
    dst_container, dst_obj = self._parse_input_path(args.output)
    headers = {'X-Run-Storlet': '%s' % args.storlet}
    # pick -i option and translate the params to
    # X-Storlet-Parameter-x headers
    storlet_headers = self._generate_params_headers(
        self.shell.user_ns[args.i] if args.i else {})
    headers.update(storlet_headers)
    # invoke storlet app on copy
    conn = get_swift_connection()
    response_dict = dict()
    with open(args.input, 'r') as content:
        conn.put_object(dst_container, dst_obj, content,
                        headers=headers,
                        response_dict=response_dict)
    res = Response(int(response_dict['status']), response_dict['headers'])
    self.shell.user_ns[args.o] = res
def notebook(self, s):
    """Export and convert IPython notebooks.

    This function can export the current IPython history to a notebook file
    or can convert an existing notebook file into a different format. For
    example, to export the history to "foo.ipynb" do "%notebook -e foo.ipynb".
    To export the history to "foo.py" do "%notebook -e foo.py". To convert
    "foo.ipynb" to "foo.json" do "%notebook -f json foo.ipynb". Possible
    formats include (json/ipynb, py).
    """
    args = magic_arguments.parse_argstring(self.notebook, s)
    from IPython.nbformat import current
    args.filename = unquote_filename(args.filename)
    if args.export:
        fname, name, format = current.parse_filename(args.filename)
        cells = []
        hist = list(self.shell.history_manager.get_range())
        for session, prompt_number, input in hist[:-1]:
            cells.append(current.new_code_cell(prompt_number=prompt_number,
                                               input=input))
        worksheet = current.new_worksheet(cells=cells)
        nb = current.new_notebook(name=name, worksheets=[worksheet])
        with io.open(fname, 'w', encoding='utf-8') as f:
            current.write(nb, f, format)
    elif args.format is not None:
        old_fname, old_name, old_format = current.parse_filename(args.filename)
        new_format = args.format
        if new_format == u'xml':
            raise ValueError('Notebooks cannot be written as xml.')
        elif new_format == u'ipynb' or new_format == u'json':
            new_fname = old_name + u'.ipynb'
            new_format = u'json'
        elif new_format == u'py':
            new_fname = old_name + u'.py'
        else:
            raise ValueError('Invalid notebook format: %s' % new_format)
        with io.open(old_fname, 'r', encoding='utf-8') as f:
            nb = current.read(f, old_format)
        with io.open(new_fname, 'w', encoding='utf-8') as f:
            current.write(nb, f, new_format)
def blockrun(self, line):
    line = line.strip()
    args = magic_arguments.parse_argstring(self.blockrun, line)
    obj = get_ipython().user_ns[args.button_widget]
    # When a cell -> run all is executed, add a _has_been_clicked attribute
    # to the widget and a handler for a "run all cells below".
    # If _has_been_clicked is True, do nothing (let the rest of the notebook
    # run). Otherwise block and replay=False.
    if not hasattr(obj, '_has_been_clicked'):
        obj._has_been_clicked = False

        def handler(w):
            w._has_been_clicked = True
            run_all_below()

        obj.on_click(handler)
    if not getattr(obj, '_has_been_clicked'):
        return self.capture(lambda: obj._has_been_clicked, replay=False)
def savefig_dark(self, line='', cell=None):
    """
    Saves a dark-background version of the Matplotlib figure generated in
    the enclosed cell.

    Arguments
    ---------
    figname : name of the figure to save
        Will be saved in the directory for dark figures
    """
    args = magic_arguments.parse_argstring(self.savefig_dark, line)
    if '.' in args.figname:
        pre, _, post = args.figname.rpartition('.')
        dark_figname = self.darkfigdir + pre + self.darkappend + '.' + post
    else:
        dark_figname = self.darkfigdir + args.figname + self.darkappend
    with plt.style.context(self.darkstyle):
        self.shell.user_ns['is_dark'] = True
        self.shell.ex(cell)
        stderr.write('Saving dark figure as %s\n' % (dark_figname))
        plt.savefig(dark_figname)
def pop_err(self, arg):
    """ Pop the last set of numpy numerical error handling settings from
    the stack. """
    try:
        import numpy
    except ImportError:
        raise UsageError("could not import numpy.")
    args = parse_argstring(self.pop_err, arg)
    stack = getattr(self, '_numpy_err_stack', [])
    if stack:
        kwds, errcall = stack.pop()
        numpy.seterr(**kwds)
        numpy.seterrcall(errcall)
    elif not args.quiet:
        print("At the end of the stack.\n")
    self._numpy_err_stack = stack
    if not args.quiet:
        print_numpy_err(numpy.geterr(), numpy.geterrcall())
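# A hedged usage sketch pairing this with the push_err magic above (flag
# names inferred from the keys it reads; numpy.seterr(divide='raise') makes
# zero-division raise FloatingPointError):
#
#   In [1]: %push_err --divide raise
#   In [2]: numpy.ones(3) / numpy.zeros(3)    # FloatingPointError
#   In [3]: %pop_err                          # restore the previous settings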
def spark_export_table(self, line, cell=None):
    global context_id
    args = magic_arguments.parse_argstring(self.spark_export_table, line)
    contextid = '"%s"' % context_id
    output = widgets.Output()
    button = widgets.Button(description="Cancel")
    progress = widgets.FloatProgress(value=0.0, min=0.0, max=1.0)
    display(widgets.VBox([output,
                          widgets.HBox([widgets.Label("Command: Export tables"),
                                        progress, button])]))
    if args.dataframe is not None:
        temp_export_table = export_table(args.dataframe, args.storename,
                                         args.tablename, args.partitionpath or "'/'")
        checkForLimits(temp_export_table["SparkDataFrameName"],
                       temp_export_table["DataStoreName"])
        command = execute_export_table_command(eval(contextid), temp_export_table)
        spark_magic(button, progress, command, None, output, self.shell.user_ns)
    else:
        for cell_line in cell.split('\n'):
            if cell_line != "":
                temp_export_table = eval(cell_line)
                checkForLimits(temp_export_table["SparkDataFrameName"],
                               temp_export_table["DataStoreName"])
                command = execute_export_table_command(eval(contextid), temp_export_table)
                spark_magic(button, progress, command, None, output, self.shell.user_ns)
def delete(self, line, cell="", local_ns=None):
    args = parse_argstring(self.delete, line)
    session = args.session[0]
    if args.force:
        id = self.spark_controller.get_session_id_for_client(self.session_name)
        if session == id:
            self.ipython_display.send_error(
                "Cannot delete this kernel's session ({}). Specify a different session,"
                " shutdown the kernel to delete this session, or run %cleanup to "
                "delete all sessions for this endpoint.".format(id))
            return None
        self.spark_controller.delete_session_by_id(self.connection_string, session)
    else:
        self.ipython_display.send_error(
            "Include the -f parameter if you understand that all statements executed "
            "in this session will be lost.")
        return None
def df(self, line='', code=None):
    args = magic_arguments.parse_argstring(self.df, line)
    if args.help:
        print(__doc__)
        return
    try:
        self.verbose = args.verbose
        self.path = args.path
        self.raw = args.raw
        self.on = args.on
        self.off = args.off
        self.one_off = True
        return self.run()
    except Exception as e:
        if self.verbose:
            print(e)
        raise
def sdsend(self, line, cell=None, local_ns={}):
    args = magic_arguments.parse_argstring(self.sdsend, line)
    template = jinja2.Template(cell)
    commands = template.render(config).splitlines()
    commands = [cmd for cmd in commands if cmd]
    for response in sessions.send(args.endpoints, commands, encoding=args.encoding):
        yml = _rep_response(response)
        if args.output_file:
            util.plush("Writing output to {}".format(args.output_file))
            with open(args.output_file, "w") as fh:
                fh.write(yml + "\n")
        else:
            util.plush(yml + "\n")
def doctest_object(self, line):
    """
    Run doctest of given objects.

    Example::

        %doctest some_object other_object   # run doctest for several objects
        %doctest -v some_object             # verbose output
        %doctest -x some_object             # debug doctest
        %doctest -f                         # doctest file

    Use the cell magic version of this magic command (``%%doctest``)
    to write doctest directly in IPython shell.
    """
    args = parse_argstring(self.doctest_object, line)
    objects = list(map(self.shell.ev, args.object))
    with self._doctest_report(len(objects)):
        for obj in objects:
            self._run_docstring_examples(obj, args)
def retract_facts(self, line, cell):
    "Retract and/or assert several facts."
    args = parse_argstring(self.retract_facts, line)
    if not args.ruleset and self.RULESET is None:
        warnings.warn("You must provide a ruleset reference (--ruleset/-r RULESET).")
        return
    elif args.ruleset:
        self.RULESET = self.shell.user_ns[args.ruleset]
    _ruleset = self.RULESET
    # print(_ruleset)
    if args.no_reset:
        _delete_state(_ruleset)
    for _assertion in cell.split('\n'):
        if _assertion.startswith('*'):
            # Lines prefixed with '*' are assertions; strip the marker.
            quick_assert_fact(_ruleset, _assertion.lstrip('*'))
        elif not _assertion.startswith('#'):
            quick_retract_fact(_ruleset, _assertion)