class RegistrationFactory(SessionFactory):
    """The Base Configurable for objects that involve registration."""

    url = Str('', config=True)  # url takes precedence over ip,regport,transport
    transport = Str('tcp', config=True)
    ip = Str('127.0.0.1', config=True)

    regport = Instance(int, config=True)
    def _regport_default(self):
        # return 10101
        return select_random_ports(1)[0]

    def __init__(self, **kwargs):
        super(RegistrationFactory, self).__init__(**kwargs)
        self._propagate_url()
        self._rebuild_url()
        self.on_trait_change(self._propagate_url, 'url')
        self.on_trait_change(self._rebuild_url, 'ip')
        self.on_trait_change(self._rebuild_url, 'transport')
        self.on_trait_change(self._rebuild_url, 'regport')

    def _rebuild_url(self):
        self.url = "%s://%s:%i" % (self.transport, self.ip, self.regport)

    def _propagate_url(self):
        """Ensure self.url contains full transport://interface:port"""
        if self.url:
            iface = self.url.split('://', 1)
            if len(iface) == 2:
                self.transport, iface = iface
            iface = iface.split(':')
            self.ip = iface[0]
            if iface[1]:
                self.regport = int(iface[1])
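# A minimal standalone sketch of the url <-> (transport, ip, regport)
# round-trip that _propagate_url() and _rebuild_url() perform above; the
# address used here is made up and no IPython import is required.
url = 'tcp://192.168.1.5:10201'
transport, iface = url.split('://', 1)      # what _propagate_url() parses out
ip, port = iface.split(':')
assert (transport, ip, int(port)) == ('tcp', '192.168.1.5', 10201)
assert '%s://%s:%i' % (transport, ip, int(port)) == url   # what _rebuild_url() emits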
class SSHLauncher(BaseLauncher):
    """A minimal launcher for ssh.

    To be useful this will probably have to be extended to use the ``sshx``
    idea for environment variables.  There could be other things this needs
    as well.
    """

    ssh_cmd = List(['ssh'], config=True)
    ssh_args = List([], config=True)
    program = List(['date'], config=True)
    program_args = List([], config=True)
    hostname = Str('', config=True)
    user = Str('', config=True)
    location = Str('')

    def _hostname_changed(self, name, old, new):
        self.location = '%s@%s' % (self.user, new)

    def _user_changed(self, name, old, new):
        self.location = '%s@%s' % (new, self.hostname)

    def find_args(self):
        return self.ssh_cmd + self.ssh_args + [self.location] + \
               self.program + self.program_args

    def start(self, n, hostname=None, user=None):
        if hostname is not None:
            self.hostname = hostname
        if user is not None:
            self.user = user
        return super(SSHLauncher, self).start()
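# Sketch of how SSHLauncher.find_args() assembles its command line from the
# configurable pieces above; the host, user and extra ssh option are invented
# for illustration.
ssh_cmd = ['ssh']
ssh_args = ['-o', 'StrictHostKeyChecking=no']
location = '%s@%s' % ('alice', 'node1.example.com')   # set by _user/_hostname_changed
program, program_args = ['date'], []
argv = ssh_cmd + ssh_args + [location] + program + program_args
print(' '.join(argv))   # ssh -o StrictHostKeyChecking=no alice@node1.example.com date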
class IPEngineTask(WinHPCTask):

    task_name = Str('IPEngine', config=True)
    engine_cmd = List(['ipengine.exe'], config=True)
    engine_args = List(['--log-to-file', '--log-level', '40'], config=True)

    # I don't want these to be configurable
    std_out_file_path = CStr('', config=False)
    std_err_file_path = CStr('', config=False)
    min_cores = Int(1, config=False)
    max_cores = Int(1, config=False)
    min_sockets = Int(1, config=False)
    max_sockets = Int(1, config=False)
    min_nodes = Int(1, config=False)
    max_nodes = Int(1, config=False)
    unit_type = Str("Core", config=False)
    work_directory = CStr('', config=False)

    def __init__(self, config=None):
        super(IPEngineTask, self).__init__(config=config)
        the_uuid = uuid.uuid1()
        self.std_out_file_path = os.path.join('log', 'ipengine-%s.out' % the_uuid)
        self.std_err_file_path = os.path.join('log', 'ipengine-%s.err' % the_uuid)

    @property
    def command_line(self):
        return ' '.join(self.engine_cmd + self.engine_args)


class PBSLauncher(BatchSystemLauncher):
    """A BatchSystemLauncher subclass for PBS."""

    submit_command = Str('qsub', config=True)
    delete_command = Str('qdel', config=True)
    job_id_regexp = Str(r'\d+', config=True)
    batch_template = Str('', config=True)
    batch_file_name = Unicode(u'pbs_batch_script', config=True)
    batch_file = Unicode(u'')


class IPEngineSetJob(WinHPCJob):

    job_name = Str('IPEngineSet', config=False)
    is_exclusive = Bool(False, config=True)
    username = Str(find_username(), config=True)
    priority = Enum(
        ('Lowest', 'BelowNormal', 'Normal', 'AboveNormal', 'Highest'),
        default_value='Highest', config=True)
    requested_nodes = Str('', config=True)
    project = Str('IPython', config=True)
class JSONFormatter(BaseFormatter):
    """A JSON string formatter.

    To define the callables that compute the JSON string representation of
    your objects, define a :meth:`__json__` method or use the :meth:`for_type`
    or :meth:`for_type_by_name` methods to register functions that handle
    this.
    """
    format_type = Str('application/json')

    print_method = Str('__json__')


class LatexFormatter(BaseFormatter):
    """A LaTeX formatter.

    To define the callables that compute the LaTeX representation of your
    objects, define a :meth:`__latex__` method or use the :meth:`for_type`
    or :meth:`for_type_by_name` methods to register functions that handle
    this.
    """
    format_type = Str('text/latex')

    print_method = Str('__latex__')


class PNGFormatter(BaseFormatter):
    """A PNG formatter.

    To define the callables that compute the PNG representation of your
    objects, define a :meth:`__png__` method or use the :meth:`for_type`
    or :meth:`for_type_by_name` methods to register functions that handle
    this.

    The raw data should be the base64 encoded raw png data.
    """
    format_type = Str('image/png')

    print_method = Str('__png__')


class SVGFormatter(BaseFormatter):
    """An SVG formatter.

    To define the callables that compute the SVG representation of your
    objects, define a :meth:`__svg__` method or use the :meth:`for_type`
    or :meth:`for_type_by_name` methods to register functions that handle
    this.
    """
    format_type = Str('image/svg+xml')

    print_method = Str('__svg__')


class HTMLFormatter(BaseFormatter):
    """An HTML formatter.

    To define the callables that compute the HTML representation of your
    objects, define a :meth:`__html__` method or use the :meth:`for_type`
    or :meth:`for_type_by_name` methods to register functions that handle
    this.
    """
    format_type = Str('text/html')

    print_method = Str('__html__')
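# Sketch of the two registration routes the formatter docstrings above
# describe, using the HTML formatter as the example.  The formatter instance
# and how it is obtained are assumptions; only the hooks themselves
# (the __html__ print_method and for_type) come from the docstrings.
class BulletList(object):
    def __init__(self, items):
        self.items = items

    def __html__(self):                      # route 1: the print_method hook
        lis = ''.join('<li>%s</li>' % i for i in self.items)
        return '<ul>%s</ul>' % lis

def bullet_list_to_html(bl):                 # route 2: a callable for for_type
    return bl.__html__()

# html_formatter.for_type(BulletList, bullet_list_to_html)   # hypothetical formatter instance
print(BulletList(['a', 'b']).__html__())     # <ul><li>a</li><li>b</li></ul>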
class WinHPCTask(Configurable):

    task_id = Str('')
    task_name = Str('')
    version = Str("2.000")
    min_cores = Int(1, config=True)
    max_cores = Int(1, config=True)
    min_sockets = Int(1, config=True)
    max_sockets = Int(1, config=True)
    min_nodes = Int(1, config=True)
    max_nodes = Int(1, config=True)
    unit_type = Str("Core", config=True)
    command_line = CStr('', config=True)
    work_directory = CStr('', config=True)
    is_rerunnaable = Bool(True, config=True)
    std_out_file_path = CStr('', config=True)
    std_err_file_path = CStr('', config=True)
    is_parametric = Bool(False, config=True)
    environment_variables = Instance(dict, args=(), config=True)

    def _write_attr(self, root, attr, key):
        s = as_str(getattr(self, attr, ''))
        if s:
            root.set(key, s)

    def as_element(self):
        root = ET.Element('Task')
        self._write_attr(root, 'version', '_A_Version')
        self._write_attr(root, 'task_name', '_B_Name')
        self._write_attr(root, 'min_cores', '_C_MinCores')
        self._write_attr(root, 'max_cores', '_D_MaxCores')
        self._write_attr(root, 'min_sockets', '_E_MinSockets')
        self._write_attr(root, 'max_sockets', '_F_MaxSockets')
        self._write_attr(root, 'min_nodes', '_G_MinNodes')
        self._write_attr(root, 'max_nodes', '_H_MaxNodes')
        self._write_attr(root, 'command_line', '_I_CommandLine')
        self._write_attr(root, 'work_directory', '_J_WorkDirectory')
        self._write_attr(root, 'is_rerunnaable', '_K_IsRerunnable')
        self._write_attr(root, 'std_out_file_path', '_L_StdOutFilePath')
        self._write_attr(root, 'std_err_file_path', '_M_StdErrFilePath')
        self._write_attr(root, 'is_parametric', '_N_IsParametric')
        self._write_attr(root, 'unit_type', '_O_UnitType')
        root.append(self.get_env_vars())
        return root

    def get_env_vars(self):
        env_vars = ET.Element('EnvironmentVariables')
        for k, v in self.environment_variables.iteritems():
            variable = ET.SubElement(env_vars, "Variable")
            name = ET.SubElement(variable, "Name")
            name.text = k
            value = ET.SubElement(variable, "Value")
            value.text = v
        return env_vars
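# Standalone sketch of the <EnvironmentVariables> element that
# WinHPCTask.get_env_vars() builds above; the variable name/value pair is
# made up.
from xml.etree import ElementTree as ET

env_vars = ET.Element('EnvironmentVariables')
for k, v in {'IPYTHON_DIR': 'c:\\temp\\ipython'}.items():
    variable = ET.SubElement(env_vars, 'Variable')
    ET.SubElement(variable, 'Name').text = k
    ET.SubElement(variable, 'Value').text = v
print(ET.tostring(env_vars))
# roughly: <EnvironmentVariables><Variable><Name>IPYTHON_DIR</Name>
#          <Value>c:\temp\ipython</Value></Variable></EnvironmentVariables>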
class EngineConnector(HasTraits):
    """A simple object for accessing the various zmq connections of an object.

    Attributes are:
    id (int): engine ID
    queue (str): identity of queue's XREQ socket
    control (str): identity of control XREQ socket
    registration (str): identity of registration XREQ socket
    heartbeat (str): identity of heartbeat XREQ socket
    pending (set): outstanding msg_ids for this engine
    """
    id = Int(0)
    queue = Str()
    control = Str()
    registration = Str()
    heartbeat = Str()
    pending = Set()
class PrefilterHandler(Configurable):

    handler_name = Str('normal')
    esc_strings = List([])
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
    prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager')

    def __init__(self, shell=None, prefilter_manager=None, config=None):
        super(PrefilterHandler, self).__init__(
            shell=shell, prefilter_manager=prefilter_manager, config=config)
        self.prefilter_manager.register_handler(self.handler_name,
                                                self,
                                                self.esc_strings)

    def handle(self, line_info):
        # print "normal: ", line_info
        """Handle normal input lines. Use as a template for handlers."""

        # With autoindent on, we need some way to exit the input loop, and I
        # don't want to force the user to have to backspace all the way to
        # clear the line.  The rule will be in this case, that either two
        # lines of pure whitespace in a row, or a line of pure whitespace but
        # of a size different to the indent level, will exit the input loop.
        line = line_info.line
        continue_prompt = line_info.continue_prompt

        if (continue_prompt and
            self.shell.autoindent and
            line.isspace() and
            0 < abs(len(line) - self.shell.indent_current_nsp) <= 2):
            line = ''

        return line

    def __str__(self):
        return "<%s(name=%s)>" % (self.__class__.__name__, self.handler_name)


class ShellEscapeHandler(PrefilterHandler):

    handler_name = Str('shell')
    esc_strings = List([ESC_SHELL, ESC_SH_CAP])

    def handle(self, line_info):
        """Execute the line in a shell, empty return value"""
        magic_handler = self.prefilter_manager.get_handler_by_name('magic')

        line = line_info.line
        if line.lstrip().startswith(ESC_SH_CAP):
            # rewrite LineInfo's line, ifun and the_rest to properly hold the
            # call to %sx and the actual command to be executed, so
            # handle_magic can work correctly.  Note that this works even if
            # the line is indented, so it handles multi_line_specials
            # properly.
            new_rest = line.lstrip()[2:]
            line_info.line = '%ssx %s' % (ESC_MAGIC, new_rest)
            line_info.ifun = 'sx'
            line_info.the_rest = new_rest
            return magic_handler.handle(line_info)
        else:
            cmd = line.lstrip().lstrip(ESC_SHELL)
            line_out = '%sget_ipython().system(%s)' % (line_info.pre_whitespace,
                                                       make_quoted_expr(cmd))
        return line_out
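# Sketch of the rewrite ShellEscapeHandler.handle() performs on a plain
# single-bang line.  repr() stands in for IPython's make_quoted_expr helper,
# and pre_whitespace is recomputed here instead of taken from LineInfo;
# ESC_SHELL is '!' as in IPython.
ESC_SHELL = '!'
line = '    !ls -l'
pre_whitespace = line[:len(line) - len(line.lstrip())]
cmd = line.lstrip().lstrip(ESC_SHELL)
print('%sget_ipython().system(%s)' % (pre_whitespace, repr(cmd)))
#     get_ipython().system('ls -l')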
class MacroHandler(PrefilterHandler):

    handler_name = Str("macro")

    def handle(self, line_info):
        obj = self.shell.user_ns.get(line_info.ifun)
        pre_space = line_info.pre_whitespace
        line_sep = "\n" + pre_space
        return pre_space + line_sep.join(obj.value.splitlines())


class AutoHandler(PrefilterHandler):

    handler_name = Str('auto')
    esc_strings = List([ESC_PAREN, ESC_QUOTE, ESC_QUOTE2])

    def handle(self, line_info):
        """Handle lines which can be auto-executed, quoting if requested."""
        line = line_info.line
        ifun = line_info.ifun
        the_rest = line_info.the_rest
        pre = line_info.pre
        continue_prompt = line_info.continue_prompt
        obj = line_info.ofind(self)['obj']
        #print 'pre <%s> ifun <%s> rest <%s>' % (pre,ifun,the_rest)  # dbg

        # This should only be active for single-line input!
        if continue_prompt:
            return line

        force_auto = isinstance(obj, IPyAutocall)
        auto_rewrite = True

        if pre == ESC_QUOTE:
            # Auto-quote splitting on whitespace
            newcmd = '%s("%s")' % (ifun, '", "'.join(the_rest.split()))
        elif pre == ESC_QUOTE2:
            # Auto-quote whole string
            newcmd = '%s("%s")' % (ifun, the_rest)
        elif pre == ESC_PAREN:
            newcmd = '%s(%s)' % (ifun, ",".join(the_rest.split()))
        else:
            # Auto-paren.
            # We only apply it to argument-less calls if the autocall
            # parameter is set to 2.  We only need to check that autocall is <
            # 2, since this function isn't called unless it's at least 1.
            if not the_rest and (self.shell.autocall < 2) and not force_auto:
                newcmd = '%s %s' % (ifun, the_rest)
                auto_rewrite = False
            else:
                if not force_auto and the_rest.startswith('['):
                    if hasattr(obj, '__getitem__'):
                        # Don't autocall in this case: item access for an object
                        # which is BOTH callable and implements __getitem__.
                        newcmd = '%s %s' % (ifun, the_rest)
                        auto_rewrite = False
                    else:
                        # if the object doesn't support [] access, go ahead and
                        # autocall
                        newcmd = '%s(%s)' % (ifun.rstrip(), the_rest)
                elif the_rest.endswith(';'):
                    newcmd = '%s(%s);' % (ifun.rstrip(), the_rest[:-1])
                else:
                    newcmd = '%s(%s)' % (ifun.rstrip(), the_rest)

        if auto_rewrite:
            self.shell.auto_rewrite_input(newcmd)

        return newcmd
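# The three escape-driven rewrites performed in AutoHandler.handle(), shown
# on an invented ifun/the_rest pair.  ESC_QUOTE=',' ESC_QUOTE2=';' and
# ESC_PAREN='/' match IPython's escape characters.
ifun, the_rest = 'myfunc', 'a b'
print('%s("%s")' % (ifun, '", "'.join(the_rest.split())))   # ,myfunc a b  -> myfunc("a", "b")
print('%s("%s")' % (ifun, the_rest))                        # ;myfunc a b  -> myfunc("a b")
print('%s(%s)' % (ifun, ",".join(the_rest.split())))        # /myfunc a b  -> myfunc(a,b)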
class SessionFactory(LoggingFactory):
    """The Base factory from which every factory in IPython.parallel inherits"""

    packer = Str('', config=True)
    unpacker = Str('', config=True)
    ident = CStr('', config=True)
    def _ident_default(self):
        return str(uuid.uuid4())
    username = CUnicode(os.environ.get('USER', 'username'), config=True)
    exec_key = CUnicode('', config=True)

    # not configurable:
    context = Instance('zmq.Context', (), {})
    session = Instance('IPython.parallel.streamsession.StreamSession')
    loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
    def _loop_default(self):
        return IOLoop.instance()

    def __init__(self, **kwargs):
        super(SessionFactory, self).__init__(**kwargs)
        exec_key = self.exec_key or None

        # set the packers:
        if not self.packer:
            packer_f = unpacker_f = None
        elif self.packer.lower() == 'json':
            packer_f = ss.json_packer
            unpacker_f = ss.json_unpacker
        elif self.packer.lower() == 'pickle':
            packer_f = ss.pickle_packer
            unpacker_f = ss.pickle_unpacker
        else:
            packer_f = import_item(self.packer)
            unpacker_f = import_item(self.unpacker)

        # construct the session
        self.session = ss.StreamSession(self.username, self.ident,
                                        packer=packer_f, unpacker=unpacker_f,
                                        key=exec_key)
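# Sketch of the packer selection done in SessionFactory.__init__ above.  The
# real json packers live in IPython's streamsession module (ss.json_packer /
# ss.json_unpacker); json.dumps / json.loads stand in for them here.
import json

packer_name = 'json'
if packer_name.lower() == 'json':
    packer_f, unpacker_f = json.dumps, json.loads
wire = packer_f({'msg_type': 'execute_request'})
assert unpacker_f(wire) == {'msg_type': 'execute_request'}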
class MagicHandler(PrefilterHandler):

    handler_name = Str('magic')
    esc_strings = List([ESC_MAGIC])

    def handle(self, line_info):
        """Execute magic functions."""
        ifun = line_info.ifun
        the_rest = line_info.the_rest
        cmd = '%sget_ipython().magic(%s)' % (line_info.pre_whitespace,
                                             make_quoted_expr(ifun + " " + the_rest))
        return cmd


class EmacsHandler(PrefilterHandler):

    handler_name = Str('emacs')
    esc_strings = List([])

    def handle(self, line_info):
        """Handle input lines marked by python-mode."""
        # Currently, nothing is done.  Later more functionality can be added
        # here if needed.
        # The input cache shouldn't be updated
        return line_info.line


class AliasHandler(PrefilterHandler):

    handler_name = Str('alias')

    def handle(self, line_info):
        """Handle alias input lines."""
        transformed = self.shell.alias_manager.expand_aliases(line_info.ifun,
                                                              line_info.the_rest)
        # pre is needed, because it carries the leading whitespace.  Otherwise
        # aliases won't work in indented sections.
        line_out = '%sget_ipython().system(%s)' % (line_info.pre_whitespace,
                                                   make_quoted_expr(transformed))
        return line_out
class IPClusterLauncher(LocalProcessLauncher):
    """Launch the ipcluster program in an external process."""

    ipcluster_cmd = List(ipcluster_cmd_argv, config=True)
    # Command line arguments to pass to ipcluster.
    ipcluster_args = List(
        ['--clean-logs', '--log-to-file', '--log-level', '40'], config=True)
    ipcluster_subcommand = Str('start')
    ipcluster_n = Int(2)

    def find_args(self):
        return self.ipcluster_cmd + [self.ipcluster_subcommand] + \
               ['-n', repr(self.ipcluster_n)] + self.ipcluster_args

    def start(self):
        log.msg("Starting ipcluster: %r" % self.args)
        return super(IPClusterLauncher, self).start()
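# What IPClusterLauncher.find_args() yields with the defaults above.  A bare
# ['ipcluster'] stands in for ipcluster_cmd_argv, which IPython derives from
# the running interpreter.
ipcluster_cmd = ['ipcluster']
args = ipcluster_cmd + ['start'] + ['-n', repr(2)] + \
       ['--clean-logs', '--log-to-file', '--log-level', '40']
print(' '.join(args))
# ipcluster start -n 2 --clean-logs --log-to-file --log-level 40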
class PrefilterHandler(Component):

    handler_name = Str('normal')
    esc_strings = List([])
    shell = Any
    prefilter_manager = Any

    def __init__(self, parent, config=None):
        super(PrefilterHandler, self).__init__(parent, config=config)
        self.prefilter_manager.register_handler(self.handler_name,
                                                self,
                                                self.esc_strings)

    @auto_attr
    def shell(self):
        return Component.get_instances(
            root=self.root,
            klass='IPython.core.iplib.InteractiveShell')[0]

    @auto_attr
    def prefilter_manager(self):
        return PrefilterManager.get_instances(root=self.root)[0]

    def handle(self, line_info):
        # print "normal: ", line_info
        """Handle normal input lines. Use as a template for handlers."""

        # With autoindent on, we need some way to exit the input loop, and I
        # don't want to force the user to have to backspace all the way to
        # clear the line.  The rule will be in this case, that either two
        # lines of pure whitespace in a row, or a line of pure whitespace but
        # of a size different to the indent level, will exit the input loop.
        line = line_info.line
        continue_prompt = line_info.continue_prompt

        if (continue_prompt and
            self.shell.autoindent and
            line.isspace() and
            (0 < abs(len(line) - self.shell.indent_current_nsp) <= 2 or
             (self.shell.buffer[-1]).isspace())):
            line = ''

        self.shell.log(line, line, continue_prompt)
        return line

    def __str__(self):
        return "<%s(name=%s)>" % (self.__class__.__name__, self.handler_name)


class AliasHandler(PrefilterHandler):

    handler_name = Str('alias')

    @auto_attr
    def alias_manager(self):
        return AliasManager.get_instances(root=self.root)[0]

    def handle(self, line_info):
        """Handle alias input lines."""
        transformed = self.alias_manager.expand_aliases(line_info.ifun,
                                                        line_info.the_rest)
        # pre is needed, because it carries the leading whitespace.  Otherwise
        # aliases won't work in indented sections.
        line_out = '%sget_ipython().system(%s)' % (line_info.pre_whitespace,
                                                   make_quoted_expr(transformed))
        self.shell.log(line_info.line, line_out, line_info.continue_prompt)
        return line_out
class HelpHandler(PrefilterHandler):

    handler_name = Str('help')
    esc_strings = List([ESC_HELP])

    def handle(self, line_info):
        """Try to get some help for the object.

        obj? or ?obj   -> basic information.
        obj?? or ??obj -> more details.
        """
        normal_handler = self.prefilter_manager.get_handler_by_name('normal')
        line = line_info.line
        # We need to make sure that we don't process lines which would be
        # otherwise valid python, such as "x=1 # what?"
        try:
            codeop.compile_command(line)
        except SyntaxError:
            # We should only handle as help stuff which is NOT valid syntax
            if line[0] == ESC_HELP:
                line = line[1:]
            elif line[-1] == ESC_HELP:
                line = line[:-1]
            self.shell.log(line, '#?' + line, line_info.continue_prompt)
            if line:
                #print 'line:<%r>' % line  # dbg
                self.shell.magic_pinfo(line)
            else:
                page(self.shell.usage, screen_lines=self.shell.usable_screen_length)
            return ''  # Empty string is needed here!
        except:
            # Pass any other exceptions through to the normal handler
            return normal_handler.handle(line_info)
        else:
            # If the code compiles ok, we should handle it normally
            return normal_handler.handle(line_info)
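# Why HelpHandler only fires on lines that are *not* valid Python: a trailing
# '?' makes the line a SyntaxError, which is the cue to treat it as a help
# request.  The sample lines are invented.
import codeop

for candidate in ('x = 1  # what?', 'range?'):
    try:
        codeop.compile_command(candidate)
        print('%r -> handled as normal input' % candidate)
    except SyntaxError:
        print('%r -> handled as a help request' % candidate)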
class IPythonWidget(FrontendWidget): """ A FrontendWidget for an IPython kernel. """ # If set, the 'custom_edit_requested(str, int)' signal will be emitted when # an editor is needed for a file. This overrides 'editor' and 'editor_line' # settings. custom_edit = Bool(False) custom_edit_requested = QtCore.Signal(object, object) # A command for invoking a system text editor. If the string contains a # {filename} format specifier, it will be used. Otherwise, the filename will # be appended to the end the command. editor = Unicode('default', config=True) # The editor command to use when a specific line number is requested. The # string should contain two format specifiers: {line} and {filename}. If # this parameter is not specified, the line number option to the %edit magic # will be ignored. editor_line = Unicode(config=True) # A CSS stylesheet. The stylesheet can contain classes for: # 1. Qt: QPlainTextEdit, QFrame, QWidget, etc # 2. Pygments: .c, .k, .o, etc (see PygmentsHighlighter) # 3. IPython: .error, .in-prompt, .out-prompt, etc style_sheet = Unicode(config=True) # If not empty, use this Pygments style for syntax highlighting. Otherwise, # the style sheet is queried for Pygments style information. syntax_style = Str(config=True) # Prompts. in_prompt = Str(default_in_prompt, config=True) out_prompt = Str(default_out_prompt, config=True) input_sep = Str(default_input_sep, config=True) output_sep = Str(default_output_sep, config=True) output_sep2 = Str(default_output_sep2, config=True) # FrontendWidget protected class variables. _input_splitter_class = IPythonInputSplitter # IPythonWidget protected class variables. _PromptBlock = namedtuple('_PromptBlock', ['block', 'length', 'number']) _payload_source_edit = zmq_shell_source + '.edit_magic' _payload_source_exit = zmq_shell_source + '.ask_exit' _payload_source_loadpy = zmq_shell_source + '.magic_loadpy' _payload_source_page = 'IPython.zmq.page.page' #--------------------------------------------------------------------------- # 'object' interface #--------------------------------------------------------------------------- def __init__(self, *args, **kw): super(IPythonWidget, self).__init__(*args, **kw) # IPythonWidget protected variables. self._code_to_load = None self._payload_handlers = { self._payload_source_edit: self._handle_payload_edit, self._payload_source_exit: self._handle_payload_exit, self._payload_source_page: self._handle_payload_page, self._payload_source_loadpy: self._handle_payload_loadpy } self._previous_prompt_obj = None self._keep_kernel_on_exit = None # Initialize widget styling. if self.style_sheet: self._style_sheet_changed() self._syntax_style_changed() else: self.set_default_style() #--------------------------------------------------------------------------- # 'BaseFrontendMixin' abstract interface #--------------------------------------------------------------------------- def _handle_complete_reply(self, rep): """ Reimplemented to support IPython's improved completion machinery. """ cursor = self._get_cursor() info = self._request_info.get('complete') if info and info.id == rep['parent_header']['msg_id'] and \ info.pos == cursor.position(): matches = rep['content']['matches'] text = rep['content']['matched_text'] offset = len(text) # Clean up matches with period and path separators if the matched # text has not been transformed. This is done by truncating all # but the last component and then suitably decreasing the offset # between the current cursor position and the start of completion. 
if len(matches) > 1 and matches[0][:offset] == text: parts = re.split(r'[./\\]', text) sep_count = len(parts) - 1 if sep_count: chop_length = sum(map(len, parts[:sep_count])) + sep_count matches = [match[chop_length:] for match in matches] offset -= chop_length # Move the cursor to the start of the match and complete. cursor.movePosition(QtGui.QTextCursor.Left, n=offset) self._complete_with_items(cursor, matches) def _handle_execute_reply(self, msg): """ Reimplemented to support prompt requests. """ info = self._request_info.get('execute') if info and info.id == msg['parent_header']['msg_id']: if info.kind == 'prompt': number = msg['content']['execution_count'] + 1 self._show_interpreter_prompt(number) else: super(IPythonWidget, self)._handle_execute_reply(msg) def _handle_history_tail_reply(self, msg): """ Implemented to handle history tail replies, which are only supported by the IPython kernel. """ history_items = msg['content']['history'] items = [line.rstrip() for _, _, line in history_items] self._set_history(items) def _handle_pyout(self, msg): """ Reimplemented for IPython-style "display hook". """ if not self._hidden and self._is_from_this_session(msg): content = msg['content'] prompt_number = content['execution_count'] data = content['data'] if data.has_key('text/html'): self._append_plain_text(self.output_sep) self._append_html(self._make_out_prompt(prompt_number)) html = data['text/html'] self._append_plain_text('\n') self._append_html(html + self.output_sep2) elif data.has_key('text/plain'): self._append_plain_text(self.output_sep) self._append_html(self._make_out_prompt(prompt_number)) text = data['text/plain'] self._append_plain_text(text + self.output_sep2) def _handle_display_data(self, msg): """ The base handler for the ``display_data`` message. """ # For now, we don't display data from other frontends, but we # eventually will as this allows all frontends to monitor the display # data. But we need to figure out how to handle this in the GUI. if not self._hidden and self._is_from_this_session(msg): source = msg['content']['source'] data = msg['content']['data'] metadata = msg['content']['metadata'] # In the regular IPythonWidget, we simply print the plain text # representation. if data.has_key('text/html'): html = data['text/html'] self._append_html(html) elif data.has_key('text/plain'): text = data['text/plain'] self._append_plain_text(text) # This newline seems to be needed for text and html output. self._append_plain_text(u'\n') def _started_channels(self): """ Reimplemented to make a history request. """ super(IPythonWidget, self)._started_channels() self.kernel_manager.xreq_channel.history_tail(1000) #--------------------------------------------------------------------------- # 'ConsoleWidget' public interface #--------------------------------------------------------------------------- def copy(self): """ Copy the currently selected text to the clipboard, removing prompts if possible. """ text = self._control.textCursor().selection().toPlainText() if text: lines = map(transform_ipy_prompt, text.splitlines()) text = '\n'.join(lines) QtGui.QApplication.clipboard().setText(text) #--------------------------------------------------------------------------- # 'FrontendWidget' public interface #--------------------------------------------------------------------------- def execute_file(self, path, hidden=False): """ Reimplemented to use the 'run' magic. """ # Use forward slashes on Windows to avoid escaping each separator. 
if sys.platform == 'win32': path = os.path.normpath(path).replace('\\', '/') self.execute('%%run %s' % path, hidden=hidden) #--------------------------------------------------------------------------- # 'FrontendWidget' protected interface #--------------------------------------------------------------------------- def _complete(self): """ Reimplemented to support IPython's improved completion machinery. """ # We let the kernel split the input line, so we *always* send an empty # text field. Readline-based frontends do get a real text field which # they can use. text = '' # Send the completion request to the kernel msg_id = self.kernel_manager.xreq_channel.complete( text, # text self._get_input_buffer_cursor_line(), # line self._get_input_buffer_cursor_column(), # cursor_pos self.input_buffer) # block pos = self._get_cursor().position() info = self._CompletionRequest(msg_id, pos) self._request_info['complete'] = info def _get_banner(self): """ Reimplemented to return IPython's default banner. """ return default_gui_banner def _process_execute_error(self, msg): """ Reimplemented for IPython-style traceback formatting. """ content = msg['content'] traceback = '\n'.join(content['traceback']) + '\n' if False: # FIXME: For now, tracebacks come as plain text, so we can't use # the html renderer yet. Once we refactor ultratb to produce # properly styled tracebacks, this branch should be the default traceback = traceback.replace(' ', ' ') traceback = traceback.replace('\n', '<br/>') ename = content['ename'] ename_styled = '<span class="error">%s</span>' % ename traceback = traceback.replace(ename, ename_styled) self._append_html(traceback) else: # This is the fallback for now, using plain text with ansi escapes self._append_plain_text(traceback) def _process_execute_payload(self, item): """ Reimplemented to dispatch payloads to handler methods. """ handler = self._payload_handlers.get(item['source']) if handler is None: # We have no handler for this type of payload, simply ignore it return False else: handler(item) return True def _show_interpreter_prompt(self, number=None): """ Reimplemented for IPython-style prompts. """ # If a number was not specified, make a prompt number request. if number is None: msg_id = self.kernel_manager.xreq_channel.execute('', silent=True) info = self._ExecutionRequest(msg_id, 'prompt') self._request_info['execute'] = info return # Show a new prompt and save information about it so that it can be # updated later if the prompt number turns out to be wrong. self._prompt_sep = self.input_sep self._show_prompt(self._make_in_prompt(number), html=True) block = self._control.document().lastBlock() length = len(self._prompt) self._previous_prompt_obj = self._PromptBlock(block, length, number) # Update continuation prompt to reflect (possibly) new prompt length. self._set_continuation_prompt(self._make_continuation_prompt( self._prompt), html=True) # Load code from the %loadpy magic, if necessary. if self._code_to_load is not None: self.input_buffer = dedent(self._code_to_load.rstrip()) self._code_to_load = None def _show_interpreter_prompt_for_reply(self, msg): """ Reimplemented for IPython-style prompts. """ # Update the old prompt number if necessary. content = msg['content'] previous_prompt_number = content['execution_count'] if self._previous_prompt_obj and \ self._previous_prompt_obj.number != previous_prompt_number: block = self._previous_prompt_obj.block # Make sure the prompt block has not been erased. 
if block.isValid() and block.text(): # Remove the old prompt and insert a new prompt. cursor = QtGui.QTextCursor(block) cursor.movePosition(QtGui.QTextCursor.Right, QtGui.QTextCursor.KeepAnchor, self._previous_prompt_obj.length) prompt = self._make_in_prompt(previous_prompt_number) self._prompt = self._insert_html_fetching_plain_text( cursor, prompt) # When the HTML is inserted, Qt blows away the syntax # highlighting for the line, so we need to rehighlight it. self._highlighter.rehighlightBlock(cursor.block()) self._previous_prompt_obj = None # Show a new prompt with the kernel's estimated prompt number. self._show_interpreter_prompt(previous_prompt_number + 1) #--------------------------------------------------------------------------- # 'IPythonWidget' interface #--------------------------------------------------------------------------- def set_default_style(self, colors='lightbg'): """ Sets the widget style to the class defaults. Parameters: ----------- colors : str, optional (default lightbg) Whether to use the default IPython light background or dark background or B&W style. """ colors = colors.lower() if colors == 'lightbg': self.style_sheet = default_light_style_sheet self.syntax_style = default_light_syntax_style elif colors == 'linux': self.style_sheet = default_dark_style_sheet self.syntax_style = default_dark_syntax_style elif colors == 'nocolor': self.style_sheet = default_bw_style_sheet self.syntax_style = default_bw_syntax_style else: raise KeyError("No such color scheme: %s" % colors) #--------------------------------------------------------------------------- # 'IPythonWidget' protected interface #--------------------------------------------------------------------------- def _edit(self, filename, line=None): """ Opens a Python script for editing. Parameters: ----------- filename : str A path to a local system file. line : int, optional A line of interest in the file. """ if self.custom_edit: self.custom_edit_requested.emit(filename, line) elif self.editor == 'default': self._append_plain_text('No default editor available.\n') else: try: filename = '"%s"' % filename if line and self.editor_line: command = self.editor_line.format(filename=filename, line=line) else: try: command = self.editor.format() except KeyError: command = self.editor.format(filename=filename) else: command += ' ' + filename except KeyError: self._append_plain_text('Invalid editor command.\n') else: try: Popen(command, shell=True) except OSError: msg = 'Opening editor with command "%s" failed.\n' self._append_plain_text(msg % command) def _make_in_prompt(self, number): """ Given a prompt number, returns an HTML In prompt. """ body = self.in_prompt % number return '<span class="in-prompt">%s</span>' % body def _make_continuation_prompt(self, prompt): """ Given a plain text version of an In prompt, returns an HTML continuation prompt. """ end_chars = '...: ' space_count = len(prompt.lstrip('\n')) - len(end_chars) body = ' ' * space_count + end_chars return '<span class="in-prompt">%s</span>' % body def _make_out_prompt(self, number): """ Given a prompt number, returns an HTML Out prompt. """ body = self.out_prompt % number return '<span class="out-prompt">%s</span>' % body #------ Payload handlers -------------------------------------------------- # Payload handlers with a generic interface: each takes the opaque payload # dict, unpacks it and calls the underlying functions with the necessary # arguments. 
def _handle_payload_edit(self, item): self._edit(item['filename'], item['line_number']) def _handle_payload_exit(self, item): self._keep_kernel_on_exit = item['keepkernel'] self.exit_requested.emit() def _handle_payload_loadpy(self, item): # Simple save the text of the .py file for later. The text is written # to the buffer when _prompt_started_hook is called. self._code_to_load = item['text'] def _handle_payload_page(self, item): # Since the plain text widget supports only a very small subset of HTML # and we have no control over the HTML source, we only page HTML # payloads in the rich text widget. if item['html'] and self.kind == 'rich': self._page(item['html'], html=True) else: self._page(item['text'], html=False) #------ Trait change handlers -------------------------------------------- def _style_sheet_changed(self): """ Set the style sheets of the underlying widgets. """ self.setStyleSheet(self.style_sheet) self._control.document().setDefaultStyleSheet(self.style_sheet) if self._page_control: self._page_control.document().setDefaultStyleSheet( self.style_sheet) bg_color = self._control.palette().window().color() self._ansi_processor.set_background_color(bg_color) def _syntax_style_changed(self): """ Set the style for the syntax highlighter. """ if self.syntax_style: self._highlighter.set_style(self.syntax_style) else: self._highlighter.set_style_sheet(self.style_sheet)
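# Sketch of the alignment computed by IPythonWidget._make_continuation_prompt()
# above: pad '...: ' so it lines up under the In prompt.  The 'In [12]: ' text
# is an assumption about what the default in_prompt renders to.
prompt = 'In [12]: '
end_chars = '...: '
space_count = len(prompt.lstrip('\n')) - len(end_chars)
print(repr(' ' * space_count + end_chars))   # '    ...: '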
class WindowsHPCLauncher(BaseLauncher):

    # A regular expression used to get the job id from the output of the
    # submit_command.
    job_id_regexp = Str(r'\d+', config=True)
    # The filename of the instantiated job script.
    job_file_name = CUnicode(u'ipython_job.xml', config=True)
    # The full path to the instantiated job script. This gets made dynamically
    # by combining the work_dir with the job_file_name.
    job_file = CUnicode(u'')
    # The hostname of the scheduler to submit the job to
    scheduler = CUnicode('', config=True)
    job_cmd = CUnicode(find_job_cmd(), config=True)

    def __init__(self, work_dir=u'.', config=None, **kwargs):
        super(WindowsHPCLauncher, self).__init__(
            work_dir=work_dir, config=config, **kwargs)

    @property
    def job_file(self):
        return os.path.join(self.work_dir, self.job_file_name)

    def write_job_file(self, n):
        raise NotImplementedError("Implement write_job_file in a subclass.")

    def find_args(self):
        return [u'job.exe']

    def parse_job_id(self, output):
        """Take the output of the submit command and return the job id."""
        m = re.search(self.job_id_regexp, output)
        if m is not None:
            job_id = m.group()
        else:
            raise LauncherError("Job id couldn't be determined: %s" % output)
        self.job_id = job_id
        self.log.info('Job started with job id: %r' % job_id)
        return job_id

    def start(self, n):
        """Start n copies of the process using the Win HPC job scheduler."""
        self.write_job_file(n)
        args = [
            'submit',
            '/jobfile:%s' % self.job_file,
            '/scheduler:%s' % self.scheduler
        ]
        self.log.info("Starting Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),))
        # Twisted will raise DeprecationWarnings if we try to pass unicode to this
        output = check_output([self.job_cmd] + args,
                              env=os.environ,
                              cwd=self.work_dir,
                              stderr=STDOUT)
        job_id = self.parse_job_id(output)
        self.notify_start(job_id)
        return job_id

    def stop(self):
        args = ['cancel', self.job_id, '/scheduler:%s' % self.scheduler]
        self.log.info("Stopping Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),))
        try:
            output = check_output([self.job_cmd] + args,
                                  env=os.environ,
                                  cwd=self.work_dir,
                                  stderr=STDOUT)
        except:
            output = 'The job already appears to be stopped: %r' % self.job_id
        self.notify_stop(dict(job_id=self.job_id, output=output))  # Pass the output of the kill cmd
        return output
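# Standalone sketch of WindowsHPCLauncher.parse_job_id(): grab the first run
# of digits from the scheduler's submit output.  The output string is made up.
import re

output = 'Job has been submitted. ID: 4242.'
m = re.search(r'\d+', output)
print(m.group() if m else 'no job id in output')   # 4242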
class InteractiveShellEmbed(TerminalInteractiveShell): dummy_mode = Bool(False) exit_msg = Str('') embedded = CBool(True) embedded_active = CBool(True) # Like the base class display_banner is not configurable, but here it # is True by default. display_banner = CBool(True) def __init__(self, config=None, ipython_dir=None, user_ns=None, user_global_ns=None, custom_exceptions=((),None), usage=None, banner1=None, banner2=None, display_banner=None, exit_msg=''): super(InteractiveShellEmbed,self).__init__( config=config, ipython_dir=ipython_dir, user_ns=user_ns, user_global_ns=user_global_ns, custom_exceptions=custom_exceptions, usage=usage, banner1=banner1, banner2=banner2, display_banner=display_banner ) self.exit_msg = exit_msg self.define_magic("kill_embedded", kill_embedded) # don't use the ipython crash handler so that user exceptions aren't # trapped sys.excepthook = ultratb.FormattedTB(color_scheme=self.colors, mode=self.xmode, call_pdb=self.pdb) def init_sys_modules(self): pass def __call__(self, header='', local_ns=None, global_ns=None, dummy=None, stack_depth=1): """Activate the interactive interpreter. __call__(self,header='',local_ns=None,global_ns,dummy=None) -> Start the interpreter shell with the given local and global namespaces, and optionally print a header string at startup. The shell can be globally activated/deactivated using the set/get_dummy_mode methods. This allows you to turn off a shell used for debugging globally. However, *each* time you call the shell you can override the current state of dummy_mode with the optional keyword parameter 'dummy'. For example, if you set dummy mode on with IPShell.set_dummy_mode(1), you can still have a specific call work by making it as IPShell(dummy=0). The optional keyword parameter dummy controls whether the call actually does anything. """ # If the user has turned it off, go away if not self.embedded_active: return # Normal exits from interactive mode set this flag, so the shell can't # re-enter (it checks this variable at the start of interactive mode). self.exit_now = False # Allow the dummy parameter to override the global __dummy_mode if dummy or (dummy != 0 and self.dummy_mode): return if self.has_readline: self.set_completer_frame() # self.banner is auto computed if header: self.old_banner2 = self.banner2 self.banner2 = self.banner2 + '\n' + header + '\n' else: self.old_banner2 = '' # Call the embedding code with a stack depth of 1 so it can skip over # our call and get the original caller's namespaces. self.mainloop(local_ns, global_ns, stack_depth=stack_depth) self.banner2 = self.old_banner2 if self.exit_msg is not None: print self.exit_msg def mainloop(self, local_ns=None, global_ns=None, stack_depth=0, display_banner=None): """Embeds IPython into a running python program. Input: - header: An optional header message can be specified. - local_ns, global_ns: working namespaces. If given as None, the IPython-initialized one is updated with __main__.__dict__, so that program variables become visible but user-specific configuration remains possible. - stack_depth: specifies how many levels in the stack to go to looking for namespaces (when local_ns and global_ns are None). This allows an intermediate caller to make sure that this function gets the namespace from the intended level in the stack. By default (0) it will get its locals and globals from the immediate caller. 
Warning: it's possible to use this in a program which is being run by IPython itself (via %run), but some funny things will happen (a few globals get overwritten). In the future this will be cleaned up, as there is no fundamental reason why it can't work perfectly.""" # Get locals and globals from caller if local_ns is None or global_ns is None: call_frame = sys._getframe(stack_depth).f_back if local_ns is None: local_ns = call_frame.f_locals if global_ns is None: global_ns = call_frame.f_globals # Update namespaces and fire up interpreter # The global one is easy, we can just throw it in self.user_global_ns = global_ns # but the user/local one is tricky: ipython needs it to store internal # data, but we also need the locals. We'll copy locals in the user # one, but will track what got copied so we can delete them at exit. # This is so that a later embedded call doesn't see locals from a # previous call (which most likely existed in a separate scope). local_varnames = local_ns.keys() self.user_ns.update(local_ns) #self.user_ns['local_ns'] = local_ns # dbg # Patch for global embedding to make sure that things don't overwrite # user globals accidentally. Thanks to Richard <*****@*****.**> # FIXME. Test this a bit more carefully (the if.. is new) if local_ns is None and global_ns is None: self.user_global_ns.update(__main__.__dict__) # make sure the tab-completer has the correct frame information, so it # actually completes using the frame's locals/globals self.set_completer_frame() with nested(self.builtin_trap, self.display_trap): self.interact(display_banner=display_banner) # now, purge out the user namespace from anything we might have added # from the caller's local namespace delvar = self.user_ns.pop for var in local_varnames: delvar(var,None)
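# Hedged usage sketch for InteractiveShellEmbed.  The import path matches the
# package layout this code comes from, but treat it as an assumption; the
# calls are left commented so the surrounding source stays importable.
# from IPython.frontend.terminal.embed import InteractiveShellEmbed
# ipshell = InteractiveShellEmbed(banner1='Dropping into IPython',
#                                 exit_msg='Leaving IPython, back to the program.')
#
# def compute():
#     x = 42
#     ipshell('Inspect local state here; `x` is visible.')  # opens an embedded shell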
class FCServiceFactory(AdaptedConfiguredObjectFactory): """This class creates a tub with various services running in it. The basic idea is that :meth:`create` returns a running :class:`Tub` instance that has a number of Foolscap references registered in it. This class is a subclass of :class:`IPython.config.configurable.Configurable` so the IPython configuration system is used. Attributes ---------- interfaces : Config A Config instance whose values are sub-Config objects having two keys: furl_file and interface_chain. The other attributes are the standard ones for Foolscap. """ ip = Str('', config=True) port = Int(0, config=True) secure = Bool(True, config=True) cert_file = Str('', config=True) location = Str('', config=True) reuse_furls = Bool(False, config=True) interfaces = Instance(klass=Config, kw={}, allow_none=False, config=True) def __init__(self, config=None, adaptee=None): super(FCServiceFactory, self).__init__(config=config, adaptee=adaptee) self._check_reuse_furls() def _ip_changed(self, name, old, new): if new == 'localhost' or new == '127.0.0.1': self.location = '127.0.0.1' def _check_reuse_furls(self): furl_files = [i.furl_file for i in self.interfaces.values()] for ff in furl_files: fullfile = self._get_security_file(ff) if self.reuse_furls: if self.port == 0: raise FURLError( "You are trying to reuse the FURL file " "for this connection, but the port for this connection " "is set to 0 (autoselect). To reuse the FURL file " "you need to specify specific port to listen on.") else: log.msg("Reusing FURL file: %s" % fullfile) else: if os.path.isfile(fullfile): log.msg("Removing old FURL file: %s" % fullfile) os.remove(fullfile) def _get_security_file(self, filename): return os.path.join(self.config.Global.security_dir, filename) def create(self): """Create and return the Foolscap tub with everything running.""" self.tub, self.listener = make_tub( self.ip, self.port, self.secure, self._get_security_file(self.cert_file)) # log.msg("Interfaces to register [%r]: %r" % \ # (self.__class__, self.interfaces)) if not self.secure: log.msg("WARNING: running with no security: %s" % \ self.__class__.__name__) reactor.callWhenRunning(self.set_location_and_register) return self.tub def set_location_and_register(self): """Set the location for the tub and return a deferred.""" if self.location == '': d = self.tub.setLocationAutomatically() else: d = defer.maybeDeferred( self.tub.setLocation, "%s:%i" % (self.location, self.listener.getPortnum())) self.adapt_to_interfaces(d) def adapt_to_interfaces(self, d): """Run through the interfaces, adapt and register.""" for ifname, ifconfig in self.interfaces.iteritems(): ff = self._get_security_file(ifconfig.furl_file) log.msg("Adapting [%s] to interface: %s" % \ (self.adaptee.__class__.__name__, ifname)) log.msg("Saving FURL for interface [%s] to file: %s" % (ifname, ff)) check_furl_file_security(ff, self.secure) adaptee = self.adaptee for i in ifconfig.interface_chain: adaptee = import_item(i)(adaptee) d.addCallback(self.register, adaptee, furl_file=ff) def register(self, empty, ref, furl_file): """Register the reference with the FURL file. The FURL file is created and then moved to make sure that when the file appears, the buffer has been flushed and the file closed. This is not done if we are re-using FURLS however. """ if self.reuse_furls: self.tub.registerReference(ref, furlFile=furl_file) else: temp_furl_file = get_temp_furlfile(furl_file) self.tub.registerReference(ref, furlFile=temp_furl_file) os.rename(temp_furl_file, furl_file)
class TerminalInteractiveShell(InteractiveShell): autoedit_syntax = CBool(False, config=True) banner = Str('') banner1 = Str(default_banner, config=True) banner2 = Str('', config=True) confirm_exit = CBool(True, config=True) # This display_banner only controls whether or not self.show_banner() # is called when mainloop/interact are called. The default is False # because for the terminal based application, the banner behavior # is controlled by Global.display_banner, which IPythonApp looks at # to determine if *it* should call show_banner() by hand or not. display_banner = CBool(False) # This isn't configurable! embedded = CBool(False) embedded_active = CBool(False) editor = Str(get_default_editor(), config=True) pager = Str('less', config=True) screen_length = Int(0, config=True) term_title = CBool(False, config=True) def __init__(self, config=None, ipython_dir=None, user_ns=None, user_global_ns=None, custom_exceptions=((), None), usage=None, banner1=None, banner2=None, display_banner=None): super(TerminalInteractiveShell, self).__init__(config=config, ipython_dir=ipython_dir, user_ns=user_ns, user_global_ns=user_global_ns, custom_exceptions=custom_exceptions) self.init_term_title() self.init_usage(usage) self.init_banner(banner1, banner2, display_banner) #------------------------------------------------------------------------- # Things related to the terminal #------------------------------------------------------------------------- @property def usable_screen_length(self): if self.screen_length == 0: return 0 else: num_lines_bot = self.separate_in.count('\n') + 1 return self.screen_length - num_lines_bot def init_term_title(self): # Enable or disable the terminal title. if self.term_title: toggle_set_term_title(True) set_term_title('IPython: ' + abbrev_cwd()) else: toggle_set_term_title(False) #------------------------------------------------------------------------- # Things related to aliases #------------------------------------------------------------------------- def init_alias(self): # The parent class defines aliases that can be safely used with any # frontend. 
super(TerminalInteractiveShell, self).init_alias() # Now define aliases that only make sense on the terminal, because they # need direct access to the console in a way that we can't emulate in # GUI or web frontend if os.name == 'posix': aliases = [('clear', 'clear'), ('more', 'more'), ('less', 'less'), ('man', 'man')] elif os.name == 'nt': aliases = [('cls', 'cls')] for name, cmd in aliases: self.alias_manager.define_alias(name, cmd) #------------------------------------------------------------------------- # Things related to the banner and usage #------------------------------------------------------------------------- def _banner1_changed(self): self.compute_banner() def _banner2_changed(self): self.compute_banner() def _term_title_changed(self, name, new_value): self.init_term_title() def init_banner(self, banner1, banner2, display_banner): if banner1 is not None: self.banner1 = banner1 if banner2 is not None: self.banner2 = banner2 if display_banner is not None: self.display_banner = display_banner self.compute_banner() def show_banner(self, banner=None): if banner is None: banner = self.banner self.write(banner) def compute_banner(self): self.banner = self.banner1 if self.profile: self.banner += '\nIPython profile: %s\n' % self.profile if self.banner2: self.banner += '\n' + self.banner2 def init_usage(self, usage=None): if usage is None: self.usage = interactive_usage else: self.usage = usage #------------------------------------------------------------------------- # Mainloop and code execution logic #------------------------------------------------------------------------- def mainloop(self, display_banner=None): """Start the mainloop. If an optional banner argument is given, it will override the internally created default banner. """ with nested(self.builtin_trap, self.display_trap): # if you run stuff with -c <cmd>, raw hist is not updated # ensure that it's in sync self.history_manager.sync_inputs() while 1: try: self.interact(display_banner=display_banner) #self.interact_with_readline() # XXX for testing of a readline-decoupled repl loop, call # interact_with_readline above break except KeyboardInterrupt: # this should not be necessary, but KeyboardInterrupt # handling seems rather unpredictable... self.write("\nKeyboardInterrupt in interact()\n") def interact(self, display_banner=None): """Closely emulate the interactive Python console.""" # batch run -> do not interact if self.exit_now: return if display_banner is None: display_banner = self.display_banner if display_banner: self.show_banner() more = False # Mark activity in the builtins __builtin__.__dict__['__IPYTHON__active'] += 1 if self.has_readline: self.readline_startup_hook(self.pre_readline) # exit_now is set by a call to %Exit or %Quit, through the # ask_exit callback. 
while not self.exit_now: self.hooks.pre_prompt_hook() if more: try: prompt = self.hooks.generate_prompt(True) except: self.showtraceback() if self.autoindent: self.rl_do_indent = True else: try: prompt = self.hooks.generate_prompt(False) except: self.showtraceback() try: line = self.raw_input(prompt) if self.exit_now: # quick exit on sys.std[in|out] close break if self.autoindent: self.rl_do_indent = False except KeyboardInterrupt: #double-guard against keyboardinterrupts during kbdint handling try: self.write('\nKeyboardInterrupt\n') self.resetbuffer() more = False except KeyboardInterrupt: pass except EOFError: if self.autoindent: self.rl_do_indent = False if self.has_readline: self.readline_startup_hook(None) self.write('\n') self.exit() except bdb.BdbQuit: warn( 'The Python debugger has exited with a BdbQuit exception.\n' 'Because of how pdb handles the stack, it is impossible\n' 'for IPython to properly format this particular exception.\n' 'IPython will resume normal operation.') except: # exceptions here are VERY RARE, but they can be triggered # asynchronously by signal handlers, for example. self.showtraceback() else: self.input_splitter.push(line) more = self.input_splitter.push_accepts_more() if (self.SyntaxTB.last_syntax_error and self.autoedit_syntax): self.edit_syntax_error() if not more: source_raw = self.input_splitter.source_raw_reset()[1] self.run_cell(source_raw) # We are off again... __builtin__.__dict__['__IPYTHON__active'] -= 1 # Turn off the exit flag, so the mainloop can be restarted if desired self.exit_now = False def raw_input(self, prompt='', continue_prompt=False): """Write a prompt and read a line. The returned line does not include the trailing newline. When the user enters the EOF key sequence, EOFError is raised. Optional inputs: - prompt(''): a string to be printed to prompt the user. - continue_prompt(False): whether this line is the first one or a continuation in a sequence of inputs. """ # Code run by the user may have modified the readline completer state. # We must ensure that our completer is back in place. if self.has_readline: self.set_readline_completer() try: line = raw_input_original(prompt).decode(self.stdin_encoding) except ValueError: warn("\n********\nYou or a %run:ed script called sys.stdin.close()" " or sys.stdout.close()!\nExiting IPython!") self.ask_exit() return "" # Try to be reasonably smart about not re-indenting pasted input more # than necessary. We do this by trimming out the auto-indent initial # spaces, if the user's actual input started itself with whitespace. if self.autoindent: if num_ini_spaces(line) > self.indent_current_nsp: line = line[self.indent_current_nsp:] self.indent_current_nsp = 0 # store the unfiltered input before the user has any chance to modify # it. if line.strip(): if continue_prompt: if self.has_readline and self.readline_use: histlen = self.readline.get_current_history_length() if histlen > 1: newhist = self.history_manager.input_hist_raw[ -1].rstrip() self.readline.remove_history_item(histlen - 1) self.readline.replace_history_item( histlen - 2, newhist.encode(self.stdin_encoding)) else: self.history_manager.input_hist_raw.append('%s\n' % line) elif not continue_prompt: self.history_manager.input_hist_raw.append('\n') try: lineout = self.prefilter_manager.prefilter_lines( line, continue_prompt) except: # blanket except, in case a user-defined prefilter crashes, so it # can't take all of ipython with it. 
self.showtraceback() return '' else: return lineout def raw_input(self, prompt=''): """Write a prompt and read a line. The returned line does not include the trailing newline. When the user enters the EOF key sequence, EOFError is raised. Optional inputs: - prompt(''): a string to be printed to prompt the user. - continue_prompt(False): whether this line is the first one or a continuation in a sequence of inputs. """ # Code run by the user may have modified the readline completer state. # We must ensure that our completer is back in place. if self.has_readline: self.set_readline_completer() try: line = raw_input_original(prompt).decode(self.stdin_encoding) except ValueError: warn("\n********\nYou or a %run:ed script called sys.stdin.close()" " or sys.stdout.close()!\nExiting IPython!") self.ask_exit() return "" # Try to be reasonably smart about not re-indenting pasted input more # than necessary. We do this by trimming out the auto-indent initial # spaces, if the user's actual input started itself with whitespace. if self.autoindent: if num_ini_spaces(line) > self.indent_current_nsp: line = line[self.indent_current_nsp:] self.indent_current_nsp = 0 return line #------------------------------------------------------------------------- # Methods to support auto-editing of SyntaxErrors. #------------------------------------------------------------------------- def edit_syntax_error(self): """The bottom half of the syntax error handler called in the main loop. Loop until syntax error is fixed or user cancels. """ while self.SyntaxTB.last_syntax_error: # copy and clear last_syntax_error err = self.SyntaxTB.clear_err_state() if not self._should_recompile(err): return try: # may set last_syntax_error again if a SyntaxError is raised self.safe_execfile(err.filename, self.user_ns) except: self.showtraceback() else: try: f = file(err.filename) try: # This should be inside a display_trap block and I # think it is. sys.displayhook(f.read()) finally: f.close() except: self.showtraceback() def _should_recompile(self, e): """Utility routine for edit_syntax_error""" if e.filename in ('<ipython console>', '<input>', '<string>', '<console>', '<BackgroundJob compilation>', None): return False try: if (self.autoedit_syntax and not self.ask_yes_no( 'Return to editor to correct syntax error? ' '[Y/n] ', 'y')): return False except EOFError: return False def int0(x): try: return int(x) except TypeError: return 0 # always pass integer line and offset values to editor hook try: self.hooks.fix_error_editor(e.filename, int0(e.lineno), int0(e.offset), e.msg) except TryNext: warn('Could not open editor') return False return True #------------------------------------------------------------------------- # Things related to GUI support and pylab #------------------------------------------------------------------------- def enable_pylab(self, gui=None): """Activate pylab support at runtime. This turns on support for matplotlib, preloads into the interactive namespace all of numpy and pylab, and configures IPython to correcdtly interact with the GUI event loop. The GUI backend to be used can be optionally selected with the optional :param:`gui` argument. Parameters ---------- gui : optional, string If given, dictates the choice of matplotlib GUI backend to use (should be one of IPython's supported backends, 'tk', 'qt', 'wx' or 'gtk'), otherwise we use the default chosen by matplotlib (as dictated by the matplotlib build-time options plus the user's matplotlibrc configuration file). 
""" # We want to prevent the loading of pylab to pollute the user's # namespace as shown by the %who* magics, so we execute the activation # code in an empty namespace, and we update *both* user_ns and # user_ns_hidden with this information. ns = {} gui = pylab_activate(ns, gui) self.user_ns.update(ns) self.user_ns_hidden.update(ns) # Now we must activate the gui pylab wants to use, and fix %run to take # plot updates into account enable_gui(gui) self.magic_run = self._pylab_magic_run #------------------------------------------------------------------------- # Things related to exiting #------------------------------------------------------------------------- def ask_exit(self): """ Ask the shell to exit. Can be overiden and used as a callback. """ self.exit_now = True def exit(self): """Handle interactive exit. This method calls the ask_exit callback.""" if self.confirm_exit: if self.ask_yes_no('Do you really want to exit ([y]/n)?', 'y'): self.ask_exit() else: self.ask_exit() #------------------------------------------------------------------------ # Magic overrides #------------------------------------------------------------------------ # Once the base class stops inheriting from magic, this code needs to be # moved into a separate machinery as well. For now, at least isolate here # the magics which this class needs to implement differently from the base # class, or that are unique to it. def magic_autoindent(self, parameter_s=''): """Toggle autoindent on/off (if available).""" self.shell.set_autoindent() print "Automatic indentation is:", ['OFF', 'ON'][self.shell.autoindent] def magic_cpaste(self, parameter_s=''): """Paste & execute a pre-formatted code block from clipboard. You must terminate the block with '--' (two minus-signs) alone on the line. You can also provide your own sentinel with '%paste -s %%' ('%%' is the new sentinel for this operation) The block is dedented prior to execution to enable execution of method definitions. '>' and '+' characters at the beginning of a line are ignored, to allow pasting directly from e-mails, diff files and doctests (the '...' continuation prompt is also stripped). The executed block is also assigned to variable named 'pasted_block' for later editing with '%edit pasted_block'. You can also pass a variable name as an argument, e.g. '%cpaste foo'. This assigns the pasted block to variable 'foo' as string, without dedenting or executing it (preceding >>> and + is still stripped) '%cpaste -r' re-executes the block previously entered by cpaste. Do not be alarmed by garbled output on Windows (it's a readline bug). Just press enter and type -- (and press enter again) and the block will be what was just pasted. IPython statements (magics, shell escapes) are not supported (yet). See also -------- paste: automatically pull code from clipboard. Examples -------- :: In [8]: %cpaste Pasting code; enter '--' alone on the line to stop. :>>> a = ["world!", "Hello"] :>>> print " ".join(sorted(a)) :-- Hello world! """ opts, args = self.parse_options(parameter_s, 'rs:', mode='string') par = args.strip() if opts.has_key('r'): self._rerun_pasted() return sentinel = opts.get('s', '--') block = self._strip_pasted_lines_for_code( self._get_pasted_lines(sentinel)) self._execute_block(block, par) def magic_paste(self, parameter_s=''): """Paste & execute a pre-formatted code block from clipboard. The text is pulled directly from the clipboard without user intervention and printed back on the screen before execution (unless the -q flag is given to force quiet mode). 
The block is dedented prior to execution to enable execution of method definitions. '>' and '+' characters at the beginning of a line are ignored, to allow pasting directly from e-mails, diff files and doctests (the '...' continuation prompt is also stripped). The executed block is also assigned to variable named 'pasted_block' for later editing with '%edit pasted_block'. You can also pass a variable name as an argument, e.g. '%paste foo'. This assigns the pasted block to variable 'foo' as string, without dedenting or executing it (preceding >>> and + is still stripped) Options ------- -r: re-executes the block previously entered by cpaste. -q: quiet mode: do not echo the pasted text back to the terminal. IPython statements (magics, shell escapes) are not supported (yet). See also -------- cpaste: manually paste code into terminal until you mark its end. """ opts, args = self.parse_options(parameter_s, 'rq', mode='string') par = args.strip() if opts.has_key('r'): self._rerun_pasted() return text = self.shell.hooks.clipboard_get() block = self._strip_pasted_lines_for_code(text.splitlines()) # By default, echo back to terminal unless quiet mode is requested if not opts.has_key('q'): write = self.shell.write write(self.shell.pycolorize(block)) if not block.endswith('\n'): write('\n') write("## -- End pasted text --\n") self._execute_block(block, par)
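# --------------------------------------------------------------------------
# Illustrative sketch (not part of the original source).  The %cpaste/%paste
# docstrings above describe how pasted text is cleaned before execution:
# doctest prompts, e-mail quoting ('>') and diff markers ('+') are stripped,
# and the block is dedented.  The helper below is a minimal, self-contained
# approximation of that step; the shell's own _strip_pasted_lines_for_code
# may differ in detail.
# --------------------------------------------------------------------------
import re
import textwrap

def _example_clean_pasted_lines(lines):
    """Approximate the cleanup applied to pasted code (illustrative only)."""
    cleaned = []
    for line in lines:
        # drop '>>> ', '... ', '> ' and '+ ' markers at the start of a line
        cleaned.append(re.sub(r'^\s*(>>> ?|\.\.\. ?|> ?|\+ ?)', '', line))
    # dedent so pasted method bodies become executable at top level
    return textwrap.dedent('\n'.join(cleaned)) + '\n'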
class Client(HasTraits): """A semi-synchronous client to the IPython ZMQ cluster Parameters ---------- url_or_file : bytes; zmq url or path to ipcontroller-client.json Connection information for the Hub's registration. If a json connector file is given, then likely no further configuration is necessary. [Default: use profile] profile : bytes The name of the Cluster profile to be used to find connector information. [Default: 'default'] context : zmq.Context Pass an existing zmq.Context instance, otherwise the client will create its own. username : bytes set username to be passed to the Session object debug : bool flag for lots of message printing for debug purposes #-------------- ssh related args ---------------- # These are args for configuring the ssh tunnel to be used # credentials are used to forward connections over ssh to the Controller # Note that the ip given in `addr` needs to be relative to sshserver # The most basic case is to leave addr as pointing to localhost (127.0.0.1), # and set sshserver as the same machine the Controller is on. However, # the only requirement is that sshserver is able to see the Controller # (i.e. is within the same trusted network). sshserver : str A string of the form passed to ssh, i.e. 'server.tld' or '[email protected]:port' If keyfile or password is specified, and this is not, it will default to the ip given in addr. sshkey : str; path to public ssh key file This specifies a key to be used in ssh login, default None. Regular default ssh keys will be used without specifying this argument. password : str Your ssh password to sshserver. Note that if this is left None, you will be prompted for it if passwordless key based login is unavailable. paramiko : bool flag for whether to use paramiko instead of shell ssh for tunneling. [default: True on win32, False else] ------- exec authentication args ------- If even localhost is untrusted, you can have some protection against unauthorized execution by using a key. Messages are still sent as cleartext, so if someone can snoop your loopback traffic this will not help against malicious attacks. exec_key : str an authentication key or file containing a key default: None Attributes ---------- ids : list of int engine IDs requesting the ids attribute always synchronizes the registration state. To request ids without synchronization, use semi-private _ids attributes. history : list of msg_ids a list of msg_ids, keeping track of all the execution messages you have submitted in order. outstanding : set of msg_ids a set of msg_ids that have been submitted, but whose results have not yet been received. 
results : dict a dict of all our results, keyed by msg_id block : bool determines default behavior when block not specified in execution methods Methods ------- spin flushes incoming results and registration state changes control methods spin, and requesting `ids` also ensures up to date wait wait on one or more msg_ids execution methods apply legacy: execute, run data movement push, pull, scatter, gather query methods queue_status, get_result, purge, result_status control methods abort, shutdown """ block = Bool(False) outstanding = Set() results = Instance('collections.defaultdict', (dict, )) metadata = Instance('collections.defaultdict', (Metadata, )) history = List() debug = Bool(False) profile = CUnicode('default') _outstanding_dict = Instance('collections.defaultdict', (set, )) _ids = List() _connected = Bool(False) _ssh = Bool(False) _context = Instance('zmq.Context') _config = Dict() _engines = Instance(util.ReverseDict, (), {}) # _hub_socket=Instance('zmq.Socket') _query_socket = Instance('zmq.Socket') _control_socket = Instance('zmq.Socket') _iopub_socket = Instance('zmq.Socket') _notification_socket = Instance('zmq.Socket') _mux_socket = Instance('zmq.Socket') _task_socket = Instance('zmq.Socket') _task_scheme = Str() _closed = False _ignored_control_replies = Int(0) _ignored_hub_replies = Int(0) def __init__(self, url_or_file=None, profile='default', cluster_dir=None, ipython_dir=None, context=None, username=None, debug=False, exec_key=None, sshserver=None, sshkey=None, password=None, paramiko=None, timeout=10): super(Client, self).__init__(debug=debug, profile=profile) if context is None: context = zmq.Context.instance() self._context = context self._setup_cluster_dir(profile, cluster_dir, ipython_dir) if self._cd is not None: if url_or_file is None: url_or_file = pjoin(self._cd.security_dir, 'ipcontroller-client.json') assert url_or_file is not None, "I can't find enough information to connect to a hub!"\ " Please specify at least one of url_or_file or profile." 
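# url_or_file may be either a zmq URL or a path to the
        # 'ipcontroller-client.json' connection file.  The block below first
        # tries to validate it as a URL; if that fails, it treats it as a file
        # path (resolved relative to the cluster's security dir when one is
        # available) and loads the connection dict from the JSON file.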
try: util.validate_url(url_or_file) except AssertionError: if not os.path.exists(url_or_file): if self._cd: url_or_file = os.path.join(self._cd.security_dir, url_or_file) assert os.path.exists( url_or_file ), "Not a valid connection file or url: %r" % url_or_file with open(url_or_file) as f: cfg = json.loads(f.read()) else: cfg = {'url': url_or_file} # sync defaults from args, json: if sshserver: cfg['ssh'] = sshserver if exec_key: cfg['exec_key'] = exec_key exec_key = cfg['exec_key'] sshserver = cfg['ssh'] url = cfg['url'] location = cfg.setdefault('location', None) cfg['url'] = util.disambiguate_url(cfg['url'], location) url = cfg['url'] self._config = cfg self._ssh = bool(sshserver or sshkey or password) if self._ssh and sshserver is None: # default to ssh via localhost sshserver = url.split('://')[1].split(':')[0] if self._ssh and password is None: if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko): password = False else: password = getpass("SSH Password for %s: " % sshserver) ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko) if exec_key is not None and os.path.isfile(exec_key): arg = 'keyfile' else: arg = 'key' key_arg = {arg: exec_key} if username is None: self.session = ss.StreamSession(**key_arg) else: self.session = ss.StreamSession(username, **key_arg) self._query_socket = self._context.socket(zmq.XREQ) self._query_socket.setsockopt(zmq.IDENTITY, self.session.session) if self._ssh: tunnel.tunnel_connection(self._query_socket, url, sshserver, **ssh_kwargs) else: self._query_socket.connect(url) self.session.debug = self.debug self._notification_handlers = { 'registration_notification': self._register_engine, 'unregistration_notification': self._unregister_engine, 'shutdown_notification': lambda msg: self.close(), } self._queue_handlers = { 'execute_reply': self._handle_execute_reply, 'apply_reply': self._handle_apply_reply } self._connect(sshserver, ssh_kwargs, timeout) def __del__(self): """cleanup sockets, but _not_ context.""" self.close() def _setup_cluster_dir(self, profile, cluster_dir, ipython_dir): if ipython_dir is None: ipython_dir = get_ipython_dir() if cluster_dir is not None: try: self._cd = ClusterDir.find_cluster_dir(cluster_dir) return except ClusterDirError: pass elif profile is not None: try: self._cd = ClusterDir.find_cluster_dir_by_profile( ipython_dir, profile) return except ClusterDirError: pass self._cd = None def _update_engines(self, engines): """Update our engines dict and _ids from a dict of the form: {id:uuid}.""" for k, v in engines.iteritems(): eid = int(k) self._engines[eid] = bytes(v) # force not unicode self._ids.append(eid) self._ids = sorted(self._ids) if sorted(self._engines.keys()) != range(len(self._engines)) and \ self._task_scheme == 'pure' and self._task_socket: self._stop_scheduling_tasks() def _stop_scheduling_tasks(self): """Stop scheduling tasks because an engine has been unregistered from a pure ZMQ scheduler. """ self._task_socket.close() self._task_socket = None msg = "An engine has been unregistered, and we are using pure " +\ "ZMQ task scheduling. Task farming will be disabled." if self.outstanding: msg += " If you were running tasks when this happened, " +\ "some `outstanding` msg_ids may never resolve." warnings.warn(msg, RuntimeWarning) def _build_targets(self, targets): """Turn valid target IDs or 'all' into two lists: (int_ids, uuids). 
""" if targets is None: targets = self._ids elif isinstance(targets, str): if targets.lower() == 'all': targets = self._ids else: raise TypeError("%r not valid str target, must be 'all'" % (targets)) elif isinstance(targets, int): if targets < 0: targets = self.ids[targets] if targets not in self.ids: raise IndexError("No such engine: %i" % targets) targets = [targets] if isinstance(targets, slice): indices = range(len(self._ids))[targets] ids = self.ids targets = [ids[i] for i in indices] if not isinstance(targets, (tuple, list, xrange)): raise TypeError( "targets by int/slice/collection of ints only, not %s" % (type(targets))) return [self._engines[t] for t in targets], list(targets) def _connect(self, sshserver, ssh_kwargs, timeout): """setup all our socket connections to the cluster. This is called from __init__.""" # Maybe allow reconnecting? if self._connected: return self._connected = True def connect_socket(s, url): url = util.disambiguate_url(url, self._config['location']) if self._ssh: return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs) else: return s.connect(url) self.session.send(self._query_socket, 'connection_request') r, w, x = zmq.select([self._query_socket], [], [], timeout) if not r: raise error.TimeoutError("Hub connection request timed out") idents, msg = self.session.recv(self._query_socket, mode=0) if self.debug: pprint(msg) msg = ss.Message(msg) content = msg.content self._config['registration'] = dict(content) if content.status == 'ok': if content.mux: self._mux_socket = self._context.socket(zmq.XREQ) self._mux_socket.setsockopt(zmq.IDENTITY, self.session.session) connect_socket(self._mux_socket, content.mux) if content.task: self._task_scheme, task_addr = content.task self._task_socket = self._context.socket(zmq.XREQ) self._task_socket.setsockopt(zmq.IDENTITY, self.session.session) connect_socket(self._task_socket, task_addr) if content.notification: self._notification_socket = self._context.socket(zmq.SUB) connect_socket(self._notification_socket, content.notification) self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'') # if content.query: # self._query_socket = self._context.socket(zmq.XREQ) # self._query_socket.setsockopt(zmq.IDENTITY, self.session.session) # connect_socket(self._query_socket, content.query) if content.control: self._control_socket = self._context.socket(zmq.XREQ) self._control_socket.setsockopt(zmq.IDENTITY, self.session.session) connect_socket(self._control_socket, content.control) if content.iopub: self._iopub_socket = self._context.socket(zmq.SUB) self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'') self._iopub_socket.setsockopt(zmq.IDENTITY, self.session.session) connect_socket(self._iopub_socket, content.iopub) self._update_engines(dict(content.engines)) else: self._connected = False raise Exception("Failed to connect!") #-------------------------------------------------------------------------- # handlers and callbacks for incoming messages #-------------------------------------------------------------------------- def _unwrap_exception(self, content): """unwrap exception, and remap engine_id to int.""" e = error.unwrap_exception(content) # print e.traceback if e.engine_info: e_uuid = e.engine_info['engine_uuid'] eid = self._engines[e_uuid] e.engine_info['engine_id'] = eid return e def _extract_metadata(self, header, parent, content): md = { 'msg_id': parent['msg_id'], 'received': datetime.now(), 'engine_uuid': header.get('engine', None), 'follow': parent.get('follow', []), 'after': parent.get('after', []), 'status': 
content['status'], } if md['engine_uuid'] is not None: md['engine_id'] = self._engines.get(md['engine_uuid'], None) if 'date' in parent: md['submitted'] = datetime.strptime(parent['date'], util.ISO8601) if 'started' in header: md['started'] = datetime.strptime(header['started'], util.ISO8601) if 'date' in header: md['completed'] = datetime.strptime(header['date'], util.ISO8601) return md def _register_engine(self, msg): """Register a new engine, and update our connection info.""" content = msg['content'] eid = content['id'] d = {eid: content['queue']} self._update_engines(d) def _unregister_engine(self, msg): """Unregister an engine that has died.""" content = msg['content'] eid = int(content['id']) if eid in self._ids: self._ids.remove(eid) uuid = self._engines.pop(eid) self._handle_stranded_msgs(eid, uuid) if self._task_socket and self._task_scheme == 'pure': self._stop_scheduling_tasks() def _handle_stranded_msgs(self, eid, uuid): """Handle messages known to be on an engine when the engine unregisters. It is possible that this will fire prematurely - that is, an engine will go down after completing a result, and the client will be notified of the unregistration and later receive the successful result. """ outstanding = self._outstanding_dict[uuid] for msg_id in list(outstanding): if msg_id in self.results: # we already continue try: raise error.EngineError( "Engine %r died while running task %r" % (eid, msg_id)) except: content = error.wrap_exception() # build a fake message: parent = {} header = {} parent['msg_id'] = msg_id header['engine'] = uuid header['date'] = datetime.now().strftime(util.ISO8601) msg = dict(parent_header=parent, header=header, content=content) self._handle_apply_reply(msg) def _handle_execute_reply(self, msg): """Save the reply to an execute_request into our results. execute messages are never actually used. apply is used instead. """ parent = msg['parent_header'] msg_id = parent['msg_id'] if msg_id not in self.outstanding: if msg_id in self.history: print("got stale result: %s" % msg_id) else: print("got unknown result: %s" % msg_id) else: self.outstanding.remove(msg_id) self.results[msg_id] = self._unwrap_exception(msg['content']) def _handle_apply_reply(self, msg): """Save the reply to an apply_request into our results.""" parent = msg['parent_header'] msg_id = parent['msg_id'] if msg_id not in self.outstanding: if msg_id in self.history: print("got stale result: %s" % msg_id) print self.results[msg_id] print msg else: print("got unknown result: %s" % msg_id) else: self.outstanding.remove(msg_id) content = msg['content'] header = msg['header'] # construct metadata: md = self.metadata[msg_id] md.update(self._extract_metadata(header, parent, content)) # is this redundant? 
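# (self.metadata is a defaultdict, so the lookup above already inserted
        # md and update() mutated it in place; the assignment below is a
        # harmless no-op kept for explicitness.)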
self.metadata[msg_id] = md e_outstanding = self._outstanding_dict[md['engine_uuid']] if msg_id in e_outstanding: e_outstanding.remove(msg_id) # construct result: if content['status'] == 'ok': self.results[msg_id] = util.unserialize_object(msg['buffers'])[0] elif content['status'] == 'aborted': self.results[msg_id] = error.TaskAborted(msg_id) elif content['status'] == 'resubmitted': # TODO: handle resubmission pass else: self.results[msg_id] = self._unwrap_exception(content) def _flush_notifications(self): """Flush notifications of engine registrations waiting in ZMQ queue.""" msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK) while msg is not None: if self.debug: pprint(msg) msg = msg[-1] msg_type = msg['msg_type'] handler = self._notification_handlers.get(msg_type, None) if handler is None: raise Exception("Unhandled message type: %s" % msg_type) else: handler(msg) msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK) def _flush_results(self, sock): """Flush task or queue results waiting in ZMQ queue.""" msg = self.session.recv(sock, mode=zmq.NOBLOCK) while msg is not None: if self.debug: pprint(msg) msg = msg[-1] msg_type = msg['msg_type'] handler = self._queue_handlers.get(msg_type, None) if handler is None: raise Exception("Unhandled message type: %s" % msg_type) else: handler(msg) msg = self.session.recv(sock, mode=zmq.NOBLOCK) def _flush_control(self, sock): """Flush replies from the control channel waiting in the ZMQ queue. Currently: ignore them.""" if self._ignored_control_replies <= 0: return msg = self.session.recv(sock, mode=zmq.NOBLOCK) while msg is not None: self._ignored_control_replies -= 1 if self.debug: pprint(msg) msg = self.session.recv(sock, mode=zmq.NOBLOCK) def _flush_ignored_control(self): """flush ignored control replies""" while self._ignored_control_replies > 0: self.session.recv(self._control_socket) self._ignored_control_replies -= 1 def _flush_ignored_hub_replies(self): msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK) while msg is not None: msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK) def _flush_iopub(self, sock): """Flush replies from the iopub channel waiting in the ZMQ queue. """ msg = self.session.recv(sock, mode=zmq.NOBLOCK) while msg is not None: if self.debug: pprint(msg) msg = msg[-1] parent = msg['parent_header'] msg_id = parent['msg_id'] content = msg['content'] header = msg['header'] msg_type = msg['msg_type'] # init metadata: md = self.metadata[msg_id] if msg_type == 'stream': name = content['name'] s = md[name] or '' md[name] = s + content['data'] elif msg_type == 'pyerr': md.update({'pyerr': self._unwrap_exception(content)}) elif msg_type == 'pyin': md.update({'pyin': content['code']}) else: md.update({msg_type: content.get('data', '')}) # redundant? 
self.metadata[msg_id] = md msg = self.session.recv(sock, mode=zmq.NOBLOCK) #-------------------------------------------------------------------------- # len, getitem #-------------------------------------------------------------------------- def __len__(self): """len(client) returns # of engines.""" return len(self.ids) def __getitem__(self, key): """index access returns DirectView multiplexer objects Must be int, slice, or list/tuple/xrange of ints""" if not isinstance(key, (int, slice, tuple, list, xrange)): raise TypeError("key by int/slice/iterable of ints only, not %s" % (type(key))) else: return self.direct_view(key) #-------------------------------------------------------------------------- # Begin public methods #-------------------------------------------------------------------------- @property def ids(self): """Always up-to-date ids property.""" self._flush_notifications() # always copy: return list(self._ids) def close(self): if self._closed: return snames = filter(lambda n: n.endswith('socket'), dir(self)) for socket in map(lambda name: getattr(self, name), snames): if isinstance(socket, zmq.Socket) and not socket.closed: socket.close() self._closed = True def spin(self): """Flush any registration notifications and execution results waiting in the ZMQ queue. """ if self._notification_socket: self._flush_notifications() if self._mux_socket: self._flush_results(self._mux_socket) if self._task_socket: self._flush_results(self._task_socket) if self._control_socket: self._flush_control(self._control_socket) if self._iopub_socket: self._flush_iopub(self._iopub_socket) if self._query_socket: self._flush_ignored_hub_replies() def wait(self, jobs=None, timeout=-1): """waits on one or more `jobs`, for up to `timeout` seconds. Parameters ---------- jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects ints are indices to self.history strs are msg_ids default: wait on all outstanding messages timeout : float a time in seconds, after which to give up. 
default is -1, which means no timeout Returns ------- True : when all msg_ids are done False : timeout reached, some msg_ids still outstanding """ tic = time.time() if jobs is None: theids = self.outstanding else: if isinstance(jobs, (int, str, AsyncResult)): jobs = [jobs] theids = set() for job in jobs: if isinstance(job, int): # index access job = self.history[job] elif isinstance(job, AsyncResult): map(theids.add, job.msg_ids) continue theids.add(job) if not theids.intersection(self.outstanding): return True self.spin() while theids.intersection(self.outstanding): if timeout >= 0 and (time.time() - tic) > timeout: break time.sleep(1e-3) self.spin() return len(theids.intersection(self.outstanding)) == 0 #-------------------------------------------------------------------------- # Control methods #-------------------------------------------------------------------------- @spin_first @default_block def clear(self, targets=None, block=None): """Clear the namespace in target(s).""" targets = self._build_targets(targets)[0] for t in targets: self.session.send(self._control_socket, 'clear_request', content={}, ident=t) error = False if self.block: self._flush_ignored_control() for i in range(len(targets)): idents, msg = self.session.recv(self._control_socket, 0) if self.debug: pprint(msg) if msg['content']['status'] != 'ok': error = self._unwrap_exception(msg['content']) else: self._ignored_control_replies += len(targets) if error: raise error @spin_first @default_block def abort(self, jobs=None, targets=None, block=None): """Abort specific jobs from the execution queues of target(s). This is a mechanism to prevent jobs that have already been submitted from executing. Parameters ---------- jobs : msg_id, list of msg_ids, or AsyncResult The jobs to be aborted """ targets = self._build_targets(targets)[0] msg_ids = [] if isinstance(jobs, (basestring, AsyncResult)): jobs = [jobs] bad_ids = filter( lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs) if bad_ids: raise TypeError( "Invalid msg_id type %r, expected str or AsyncResult" % bad_ids[0]) for j in jobs: if isinstance(j, AsyncResult): msg_ids.extend(j.msg_ids) else: msg_ids.append(j) content = dict(msg_ids=msg_ids) for t in targets: self.session.send(self._control_socket, 'abort_request', content=content, ident=t) error = False if self.block: self._flush_ignored_control() for i in range(len(targets)): idents, msg = self.session.recv(self._control_socket, 0) if self.debug: pprint(msg) if msg['content']['status'] != 'ok': error = self._unwrap_exception(msg['content']) else: self._ignored_control_replies += len(targets) if error: raise error @spin_first @default_block def shutdown(self, targets=None, restart=False, hub=False, block=None): """Terminates one or more engine processes, optionally including the hub.""" if hub: targets = 'all' targets = self._build_targets(targets)[0] for t in targets: self.session.send(self._control_socket, 'shutdown_request', content={'restart': restart}, ident=t) error = False if block or hub: self._flush_ignored_control() for i in range(len(targets)): idents, msg = self.session.recv(self._control_socket, 0) if self.debug: pprint(msg) if msg['content']['status'] != 'ok': error = self._unwrap_exception(msg['content']) else: self._ignored_control_replies += len(targets) if hub: time.sleep(0.25) self.session.send(self._query_socket, 'shutdown_request') idents, msg = self.session.recv(self._query_socket, 0) if self.debug: pprint(msg) if msg['content']['status'] != 'ok': error = 
self._unwrap_exception(msg['content']) if error: raise error #-------------------------------------------------------------------------- # Execution methods #-------------------------------------------------------------------------- @default_block def _execute(self, code, targets='all', block=None): """Executes `code` on `targets` in blocking or nonblocking manner. ``execute`` is always `bound` (affects engine namespace) Parameters ---------- code : str the code string to be executed targets : int/str/list of ints/strs the engines on which to execute default : all block : bool whether or not to wait until done to return default: self.block """ return self[targets].execute(code, block=block) def _maybe_raise(self, result): """wrapper for maybe raising an exception if apply failed.""" if isinstance(result, error.RemoteError): raise result return result def send_apply_message(self, socket, f, args=None, kwargs=None, subheader=None, track=False, ident=None): """construct and send an apply message via a socket. This is the principal method with which all engine execution is performed by views. """ assert not self._closed, "cannot use me anymore, I'm closed!" # defaults: args = args if args is not None else [] kwargs = kwargs if kwargs is not None else {} subheader = subheader if subheader is not None else {} # validate arguments if not callable(f): raise TypeError("f must be callable, not %s" % type(f)) if not isinstance(args, (tuple, list)): raise TypeError("args must be tuple or list, not %s" % type(args)) if not isinstance(kwargs, dict): raise TypeError("kwargs must be dict, not %s" % type(kwargs)) if not isinstance(subheader, dict): raise TypeError("subheader must be dict, not %s" % type(subheader)) if not self._ids: # flush notification socket if no engines yet any_ids = self.ids if not any_ids: raise error.NoEnginesRegistered( "Can't execute without any connected engines.") # enforce types of f,args,kwargs bufs = util.pack_apply_message(f, args, kwargs) msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident, subheader=subheader, track=track) msg_id = msg['msg_id'] self.outstanding.add(msg_id) if ident: # possibly routed to a specific engine if isinstance(ident, list): ident = ident[-1] if ident in self._engines.values(): # save for later, in case of engine death self._outstanding_dict[ident].add(msg_id) self.history.append(msg_id) self.metadata[msg_id]['submitted'] = datetime.now() return msg #-------------------------------------------------------------------------- # construct a View object #-------------------------------------------------------------------------- def load_balanced_view(self, targets=None): """construct a LoadBalancedView object. If no arguments are specified, create a LoadBalancedView using all engines. Parameters ---------- targets: list,slice,int,etc. [default: use all engines] The subset of engines across which to load-balance """ if targets is not None: targets = self._build_targets(targets)[1] return LoadBalancedView(client=self, socket=self._task_socket, targets=targets) def direct_view(self, targets='all'): """construct a DirectView object. If no targets are specified, create a DirectView using all engines. Parameters ---------- targets: list,slice,int,etc. 
[default: use all engines] The engines to use for the View """ single = isinstance(targets, int) targets = self._build_targets(targets)[1] if single: targets = targets[0] return DirectView(client=self, socket=self._mux_socket, targets=targets) #-------------------------------------------------------------------------- # Data movement (TO BE REMOVED) #-------------------------------------------------------------------------- @default_block def _push(self, ns, targets='all', block=None, track=False): """Push the contents of `ns` into the namespace on `target`""" if not isinstance(ns, dict): raise TypeError("Must be a dict, not %s" % type(ns)) result = self.apply(util._push, kwargs=ns, targets=targets, block=block, bound=True, balanced=False, track=track) if not block: return result @default_block def _pull(self, keys, targets='all', block=None): """Pull objects from `target`'s namespace by `keys`""" if isinstance(keys, basestring): pass elif isinstance(keys, (list, tuple, set)): for key in keys: if not isinstance(key, basestring): raise TypeError("keys must be str, not type %r" % type(key)) else: raise TypeError("keys must be strs, not %r" % keys) result = self.apply(util._pull, (keys, ), targets=targets, block=block, bound=True, balanced=False) return result #-------------------------------------------------------------------------- # Query methods #-------------------------------------------------------------------------- @spin_first @default_block def get_result(self, indices_or_msg_ids=None, block=None): """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object. If the client already has the results, no request to the Hub will be made. This is a convenient way to construct AsyncResult objects, which are wrappers that include metadata about execution, and allow for awaiting results that were not submitted by this Client. It can also be a convenient way to retrieve the metadata associated with blocking execution, since it always retrieves Examples -------- :: In [10]: r = client.apply() Parameters ---------- indices_or_msg_ids : integer history index, str msg_id, or list of either The indices or msg_ids of indices to be retrieved block : bool Whether to wait for the result to be done Returns ------- AsyncResult A single AsyncResult object will always be returned. AsyncHubResult A subclass of AsyncResult that retrieves results from the Hub """ if indices_or_msg_ids is None: indices_or_msg_ids = -1 if not isinstance(indices_or_msg_ids, (list, tuple)): indices_or_msg_ids = [indices_or_msg_ids] theids = [] for id in indices_or_msg_ids: if isinstance(id, int): id = self.history[id] if not isinstance(id, str): raise TypeError("indices must be str or int, not %r" % id) theids.append(id) local_ids = filter( lambda msg_id: msg_id in self.history or msg_id in self.results, theids) remote_ids = filter(lambda msg_id: msg_id not in local_ids, theids) if remote_ids: ar = AsyncHubResult(self, msg_ids=theids) else: ar = AsyncResult(self, msg_ids=theids) if block: ar.wait() return ar @spin_first def result_status(self, msg_ids, status_only=True): """Check on the status of the result(s) of the apply request with `msg_ids`. If status_only is False, then the actual results will be retrieved, else only the status of the results will be checked. Parameters ---------- msg_ids : list of msg_ids if int: Passed as index to self.history for convenience. status_only : bool (default: True) if False: Retrieve the actual results of completed tasks. 
Returns ------- results : dict There will always be the keys 'pending' and 'completed', which will be lists of msg_ids that are incomplete or complete. If `status_only` is False, then completed results will be keyed by their `msg_id`. """ if not isinstance(msg_ids, (list, tuple)): msg_ids = [msg_ids] theids = [] for msg_id in msg_ids: if isinstance(msg_id, int): msg_id = self.history[msg_id] if not isinstance(msg_id, basestring): raise TypeError("msg_ids must be str, not %r" % msg_id) theids.append(msg_id) completed = [] local_results = {} # comment this block out to temporarily disable local shortcut: for msg_id in theids: if msg_id in self.results: completed.append(msg_id) local_results[msg_id] = self.results[msg_id] theids.remove(msg_id) if theids: # some not locally cached content = dict(msg_ids=theids, status_only=status_only) msg = self.session.send(self._query_socket, "result_request", content=content) zmq.select([self._query_socket], [], []) idents, msg = self.session.recv(self._query_socket, zmq.NOBLOCK) if self.debug: pprint(msg) content = msg['content'] if content['status'] != 'ok': raise self._unwrap_exception(content) buffers = msg['buffers'] else: content = dict(completed=[], pending=[]) content['completed'].extend(completed) if status_only: return content failures = [] # load cached results into result: content.update(local_results) # update cache with results: for msg_id in sorted(theids): if msg_id in content['completed']: rec = content[msg_id] parent = rec['header'] header = rec['result_header'] rcontent = rec['result_content'] iodict = rec['io'] if isinstance(rcontent, str): rcontent = self.session.unpack(rcontent) md = self.metadata[msg_id] md.update(self._extract_metadata(header, parent, rcontent)) md.update(iodict) if rcontent['status'] == 'ok': res, buffers = util.unserialize_object(buffers) else: print rcontent res = self._unwrap_exception(rcontent) failures.append(res) self.results[msg_id] = res content[msg_id] = res if len(theids) == 1 and failures: raise failures[0] error.collect_exceptions(failures, "result_status") return content @spin_first def queue_status(self, targets='all', verbose=False): """Fetch the status of engine queues. Parameters ---------- targets : int/str/list of ints/strs the engines whose states are to be queried. default : all verbose : bool Whether to return lengths only, or lists of ids for each element """ engine_ids = self._build_targets(targets)[1] content = dict(targets=engine_ids, verbose=verbose) self.session.send(self._query_socket, "queue_request", content=content) idents, msg = self.session.recv(self._query_socket, 0) if self.debug: pprint(msg) content = msg['content'] status = content.pop('status') if status != 'ok': raise self._unwrap_exception(content) content = util.rekey(content) if isinstance(targets, int): return content[targets] else: return content @spin_first def purge_results(self, jobs=[], targets=[]): """Tell the Hub to forget results. Individual results can be purged by msg_id, or the entire history of specific targets can be purged. Parameters ---------- jobs : str or list of str or AsyncResult objects the msg_ids whose results should be forgotten. targets : int/str/list of ints/strs The targets, by uuid or int_id, whose entire history is to be purged. Use `targets='all'` to scrub everything from the Hub's memory. 
default : None """ if not targets and not jobs: raise ValueError( "Must specify at least one of `targets` and `jobs`") if targets: targets = self._build_targets(targets)[1] # construct msg_ids from jobs msg_ids = [] if isinstance(jobs, (basestring, AsyncResult)): jobs = [jobs] bad_ids = filter( lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs) if bad_ids: raise TypeError( "Invalid msg_id type %r, expected str or AsyncResult" % bad_ids[0]) for j in jobs: if isinstance(j, AsyncResult): msg_ids.extend(j.msg_ids) else: msg_ids.append(j) content = dict(targets=targets, msg_ids=msg_ids) self.session.send(self._query_socket, "purge_request", content=content) idents, msg = self.session.recv(self._query_socket, 0) if self.debug: pprint(msg) content = msg['content'] if content['status'] != 'ok': raise self._unwrap_exception(content)
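
# --------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original source).  It assumes a
# running cluster reachable through the default profile's connection file;
# only Client methods defined above are used, but treat this as an example,
# not a reference.
# --------------------------------------------------------------------------
def _example_client_session():
    """Connect to a cluster, run some code on the engines, and clean up."""
    client = Client(profile='default')   # or pass url_or_file= explicitly
    print client.ids                     # engine ids currently registered

    dview = client[:]                    # DirectView over all engines
    dview.execute('a = 42', block=True)  # run code on every engine

    # queue lengths per engine (verbose=True returns the msg_ids instead)
    print client.queue_status()

    client.wait(timeout=5)                     # wait up to 5s for outstanding msgs
    client.purge_results(jobs=client.history)  # ask the Hub to forget old results
    client.close()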