def register(self, editor):
    """Register *editor* in the editor registry, keyed by its uuid.

    Raises:
        ValueError: if an editor with the same uuid is already registered.
    Emits this object's change signal after registration; if the newly
    registered editor is the preferred one (and no custom editor has been
    set), re-runs the editor-changed hook first.
    """
    if editor.uuid in self.__editors:
        # Interpolate eagerly: unlike logging calls, ValueError does not
        # apply %-formatting to extra constructor arguments.
        raise ValueError("Editor uuid %s already registered" % (editor.uuid,))
    self.__editors[editor.uuid] = editor
    if editor.uuid == self.__pref_editor_uuid and not self.__custom_editor_set:
        self.__on_editor_changed()
    dispatcher.send(sender=self)
def __idle_emit_cmd_metadata(self):
    """Flush queued command metadata, sending one 'metadata' signal per entry.

    Swaps the pending-metadata dict out under the lock, then signals
    outside the lock so handlers cannot deadlock against it.
    """
    _logger.debug("signalling command metadata")
    # Use the lock as a context manager: the original acquire()/release()
    # pair had no try/finally, so an exception between them would leave
    # the lock held forever.
    with self.__cmd_metadata_lock:
        self.__idle_emit_cmd_metadata_id = 0
        meta_ref = self.__cmd_metadata
        self.__cmd_metadata = {}
    for (cmd, cmdidx, key), (flags, meta) in meta_ref.items():
        dispatcher.send('metadata', self, cmdidx, cmd, key, flags, meta)
def __idle_emit_cmd_metadata(self):
    """Emit all buffered command metadata as 'metadata' signals.

    The pending dict is detached while holding the lock; dispatching
    happens after release so signal handlers never run under the lock.
    """
    _logger.debug("signalling command metadata")
    # `with` guarantees release even on exception; the bare
    # acquire()/release() pair it replaces did not.
    with self.__cmd_metadata_lock:
        self.__idle_emit_cmd_metadata_id = 0
        meta_ref = self.__cmd_metadata
        self.__cmd_metadata = {}
    for (cmd, cmdidx, key), (flags, meta) in meta_ref.items():
        dispatcher.send('metadata', self, cmdidx, cmd, key, flags, meta)
def chdir(self, dpath):
    """Change the tracked current working directory to *dpath*.

    Relative paths are resolved against the current cwd; '~' is expanded.
    Returns the new normalized cwd and emits a 'cwd' signal.

    Raises:
        OSError: (from os.stat) if the resulting directory does not exist.
    """
    if not isinstance(dpath, str):
        # Assume incoming bytes are UTF-8 encoded.
        dpath = str(dpath, 'utf-8')
    dpath = os.path.expanduser(dpath)
    # Conditional expression instead of the fragile `cond and a or b`
    # idiom, which silently picks `b` whenever `a` is falsy.
    newcwd = dpath if os.path.isabs(dpath) else posixpath.join(self.__cwd, dpath)
    newcwd = path_normalize(newcwd)
    _logger.debug("chdir: %s post-normalize: %s", dpath, newcwd)
    os.stat(newcwd)  # lose on nonexistent
    self.__cwd = newcwd
    dispatcher.send('cwd', self, newcwd)
    return self.__cwd
def chdir(self, dpath):
    """Switch the tracked working directory to *dpath* (Python 2 variant).

    Coerces byte strings to unicode (UTF-8), expands '~', resolves
    relative paths against the current cwd, and verifies the target
    exists before committing.  Emits a 'cwd' signal and returns the
    new cwd.
    """
    if not isinstance(dpath, unicode):
        dpath = unicode(dpath, 'utf-8')
    dpath = os.path.expanduser(dpath)
    if os.path.isabs(dpath):
        target = dpath
    else:
        target = posixpath.join(self.__cwd, dpath)
    target = path_normalize(target)
    _logger.debug("chdir: %s post-normalize: %s", dpath, target)
    # Stat so a nonexistent directory raises before we commit the change.
    os.stat(target)
    self.__cwd = target
    dispatcher.send('cwd', self, target)
    return self.__cwd
def __on_cmd_exception(self, e, sender=None):
    """Handle an exception signalled by a command while executing.

    Cancels the pipeline (best-effort), emits an 'exception' signal,
    records the exception info tuple, and transitions to the
    'exception' state.  Ignored unless currently 'executing'.
    """
    cmd = sender
    if self.__state != 'executing':
        return
    try:
        self.cancel(changestate=False)
    except Exception:
        # Narrowed from a bare `except:` (which would also swallow
        # KeyboardInterrupt/SystemExit).  Cancel is best-effort: log and
        # continue so the original exception is still reported.
        _logger.exception("Nested exception while cancelling")
    dispatcher.send('exception', self, e, cmd)
    self.__exception_info = (e.__class__, str(e), cmd, traceback.format_exc())
    self.__set_state('exception')
def __set_state(self, state):
    """Transition to *state* after validating the transition.

    A validation result of None means "silently ignore"; a falsy result
    is an error; otherwise the state is committed, the completion time
    is recorded for terminal states, and 'state-changed' is emitted.

    Raises:
        ValueError: if the transition is invalid.
    """
    trans = self.validate_state_transition(state)
    if trans is None:
        _logger.debug("ignoring transition from state %s to %s", self.__state, state)
        return
    elif not trans:
        # Interpolate eagerly: ValueError does not apply %-formatting to
        # extra constructor arguments the way logging functions do.
        raise ValueError("Invalid state transition %s to %s" % (self.__state, state))
    self.__state = state
    if self.is_complete():
        self.__completion_time = time.time()
    dispatcher.send('state-changed', self)
def add(self, path):
    """Add *path* to the bookmarks and persist them atomically.

    No-op if the path is already bookmarked.  The bookmark file is
    rewritten via a temp file in the same directory plus an atomic
    rename, so readers never observe a partially written file.
    Emits this object's change signal afterwards.
    """
    if path in self.__bookmarks:
        return
    self.__bookmarks.append(path)
    (bdir, bname) = os.path.split(self.__bookmarks_path)
    (fd, temppath) = tempfile.mkstemp('.tmp', bname, bdir)
    # Context manager ensures the descriptor is closed even if a write
    # raises (the original leaked the fd on error).
    with os.fdopen(fd, 'w') as f:
        for mark in self.__bookmarks:
            f.write(path_tourl(mark))
            f.write('\n')
    atomic_rename(temppath, self.__bookmarks_path)
    # Might as well signal now
    dispatcher.send(sender=self)
def __on_hostchange(self):
    """Re-read the SSH known_hosts file and rebuild the host cache.

    Missing/unreadable files yield an empty cache.  Signals the change
    from an idle callback to avoid re-entrancy.
    """
    try:
        _logger.debug("reading %s", self.__path)
        f = open(self.__path)
    except EnvironmentError:
        # Narrowed from a bare `except:`; only I/O errors are expected.
        _logger.debug("failed to open known hosts")
        f = None
    hosts = set()
    if f is not None:
        try:
            for line in f:
                # Entries look like "host[,ip...] keytype key"; skip
                # blank/malformed lines instead of crashing the unpack.
                parts = line.split(' ', 1)
                if len(parts) != 2:
                    continue
                hostip = parts[0]
                if hostip.find(',') > 0:
                    host = hostip.split(',', 1)[0]
                else:
                    host = hostip
                hosts.add(host.strip())
        finally:
            # Close even if parsing raises (original left it open).
            f.close()
    self.__hostcache = hosts
    _logger.debug("ssh cache: %r", self.__hostcache)
    # Do this in an idle to avoid recursion
    call_idle_once(lambda: dispatcher.send(sender=self))
def __on_hostchange(self):
    """Reload the known_hosts file into the in-memory host cache.

    An unreadable file results in an empty cache; change notification
    is deferred to an idle callback to avoid recursion.
    """
    try:
        _logger.debug("reading %s", self.__path)
        f = open(self.__path)
    except EnvironmentError:
        # Was a bare `except:` — catch only I/O-style failures.
        _logger.debug("failed to open known hosts")
        f = None
    hosts = set()
    if f is not None:
        try:
            for line in f:
                # known_hosts format: "host[,ip...] keytype key".
                # Skip blank or malformed lines rather than raising.
                parts = line.split(' ', 1)
                if len(parts) != 2:
                    continue
                hostip = parts[0]
                if hostip.find(',') > 0:
                    host = hostip.split(',', 1)[0]
                else:
                    host = hostip
                hosts.add(host.strip())
        finally:
            # Guarantee the file is closed even on a parse error.
            f.close()
    self.__hostcache = hosts
    _logger.debug("ssh cache: %r", self.__hostcache)
    # Do this in an idle to avoid recursion
    call_idle_once(lambda: dispatcher.send(sender=self))
def __run(self, *args, **kwargs):
    """Worker body for a command: glob arguments, invoke the builtin, and
    stream results to the output queue.

    Emits 'complete' when finished; on error, re-raises when executing
    synchronously, otherwise emits 'exception' followed by 'complete'.
    """
    if self._cancelled:
        # Cancelled before starting; push the terminator and bail out.
        _logger.debug("%s cancelled, returning", self)
        self.output.put(self.map_fn(None))
        return
    try:
        # Expand each argument via globbing.  Quoted arguments pass
        # through untouched; when a glob matches nothing, the literal
        # argument itself is kept (tracked via oldlen/newlen).
        matched_files = []
        oldlen = 0
        for globarg_in in self.args:
            if isinstance(globarg_in, CommandArgument) and globarg_in.isquoted:
                globarg = globarg_in
                newlen = oldlen
            else:
                globarg = os.path.expanduser(globarg_in)
                matched_files.extend(hotwire.fs.dirglob(self.context.cwd, globarg))
                _logger.debug("glob on %s matched is: %s", globarg_in, matched_files)
                newlen = len(matched_files)
            if oldlen == newlen:
                # No new matches: fall back to the argument itself.
                matched_files.append(globarg)
                newlen += 1
            oldlen = newlen
        target_args = [matched_files]
        _logger.info("Execute '%s' args: %s options: %s", self.builtin, target_args, self.context.options)
        # NOTE(review): this discards the incoming **kwargs and rebuilds
        # the keyword arguments for the builtin — confirm no caller
        # relies on kwargs passed to __run itself.
        kwargs = {}
        if self.context.options and not self.builtin.flattened_args:
            kwargs['options'] = self.context.options
        if self.input is not None and self.input.opt_type and not self.in_redir:
            kwargs['in_opt_format'] = self.input.opt_type
        if self.output.opt_type and not self.out_redir:
            kwargs['out_opt_format'] = self.output.opt_type
        if self.in_redir:
            # Input redirection replaces the pipeline input queue.
            _logger.debug("input redirected, opening %s", self.in_redir)
            self.context.input = CommandFileQueue(open_text_file(self.in_redir, 'r'))
        if self.out_redir:
            _logger.debug("output redirected, opening %s", self.out_redir)
            outfile = open_text_file(self.out_redir, self.out_append and 'a+' or 'w')
        else:
            outfile = None
        try:
            exectarget = self.builtin.execfunc
            if self.builtin.flattened_args:
                # Builtin takes the matched files spread as positionals.
                target_args = target_args[0]
            execresult = exectarget(self.context, *target_args, **kwargs)
            if self.builtin.singlevalue:
                # Single-value builtins return one object, not an iterable.
                if outfile:
                    outfile.write(str(execresult))
                else:
                    self.output.put(execresult)
            else:
                for result in execresult:
                    # if it has status, let it do its own cleanup
                    if self._cancelled and not self.builtin.hasstatus:
                        _logger.debug("%s cancelled, returning", self)
                        self.output.put(self.map_fn(None))
                        dispatcher.send('complete', self)
                        return
                    if outfile and (result is not None):
                        result = str(result)
                        outfile.write(result)
                    else:
                        self.output.put(self.map_fn(result))
        finally:
            if outfile:
                outfile.close()
            self.builtin.cleanup(self.context)
    except Exception as e:
        _logger.debug("Caught exception from command: %s", e, exc_info=True)
        if self.__executing_sync:
            # Synchronous execution: let the caller observe the exception.
            raise
        else:
            dispatcher.send('exception', self, e)
            self.output.put(self.map_fn(None))
    dispatcher.send('complete', self)
def __idle_emit_changed(self):
    """Fire this object's change signal (from an idle callback) and log
    which listeners responded."""
    listener_results = dispatcher.send(sender=self)
    _logger.debug("idle changed dispatch from %r, responses=%r", self, listener_results)
def register(self, lang):
    """Register *lang* in the language registry, keyed by its uuid.

    Raises:
        ValueError: if a language with the same uuid is already registered.
    Emits this object's change signal on success.
    """
    if lang.uuid in self.__langs:
        # Interpolate eagerly: ValueError does not %-format extra
        # constructor arguments the way logging calls do.
        raise ValueError("Language uuid %s already registered" % (lang.uuid,))
    self.__langs[lang.uuid] = lang
    dispatcher.send(sender=self)
def on_meta(*args):
    # Closure callback: relays command metadata as a 'metadata' signal.
    # `self` is a free variable captured from the enclosing scope
    # (presumably the owning Command object — TODO confirm against the
    # surrounding definition, which is not visible in this excerpt).
    dispatcher.send('metadata', self, *args)
def __run(self, *args, **kwargs):
    """Execute the builtin for this command and stream its results.

    Globs arguments, builds the builtin's keyword arguments, wires up
    any input/output redirection, then iterates the builtin's results
    into the output queue (or the redirect file).  Emits 'complete' at
    the end; on failure re-raises when synchronous, otherwise emits
    'exception' and then 'complete'.
    """
    if self._cancelled:
        # Already cancelled; emit the terminator sentinel and stop.
        _logger.debug("%s cancelled, returning", self)
        self.output.put(self.map_fn(None))
        return
    try:
        # Glob expansion.  Quoted args are passed through verbatim; a
        # glob with zero matches keeps the literal argument (detected
        # via the oldlen/newlen length bookkeeping).
        matched_files = []
        oldlen = 0
        for globarg_in in self.args:
            if isinstance(globarg_in, CommandArgument) and globarg_in.isquoted:
                globarg = globarg_in
                newlen = oldlen
            else:
                globarg = os.path.expanduser(globarg_in)
                matched_files.extend(hotwire.fs.dirglob(self.context.cwd, globarg))
                _logger.debug("glob on %s matched is: %s", globarg_in, matched_files)
                newlen = len(matched_files)
            if oldlen == newlen:
                # Nothing matched this argument: keep it literally.
                matched_files.append(globarg)
                newlen += 1
            oldlen = newlen
        target_args = [matched_files]
        _logger.info("Execute '%s' args: %s options: %s", self.builtin, target_args, self.context.options)
        # NOTE(review): the incoming **kwargs is intentionally replaced
        # here with a fresh dict of builtin options — verify no caller
        # passes kwargs it expects to reach the builtin.
        kwargs = {}
        if self.context.options and not self.builtin.flattened_args:
            kwargs['options'] = self.context.options
        if self.input is not None and self.input.opt_type and not self.in_redir:
            kwargs['in_opt_format'] = self.input.opt_type
        if self.output.opt_type and not self.out_redir:
            kwargs['out_opt_format'] = self.output.opt_type
        if self.in_redir:
            # Input redirection: read objects from the file instead of
            # the upstream pipeline queue.
            _logger.debug("input redirected, opening %s", self.in_redir)
            self.context.input = CommandFileQueue(open_text_file(self.in_redir, 'r'))
        if self.out_redir:
            _logger.debug("output redirected, opening %s", self.out_redir)
            outfile = open_text_file(self.out_redir, self.out_append and 'a+' or 'w')
        else:
            outfile = None
        try:
            exectarget = self.builtin.execfunc
            if self.builtin.flattened_args:
                # The builtin expects matched files as positional args.
                target_args = target_args[0]
            execresult = exectarget(self.context, *target_args, **kwargs)
            if self.builtin.singlevalue:
                # Single-value builtins return one object, not an iterable.
                if outfile:
                    outfile.write(str(execresult))
                else:
                    self.output.put(execresult)
            else:
                for result in execresult:
                    # if it has status, let it do its own cleanup
                    if self._cancelled and not self.builtin.hasstatus:
                        _logger.debug("%s cancelled, returning", self)
                        self.output.put(self.map_fn(None))
                        dispatcher.send('complete', self)
                        return
                    if outfile and (result is not None):
                        result = str(result)
                        outfile.write(result)
                    else:
                        self.output.put(self.map_fn(result))
        finally:
            if outfile:
                outfile.close()
            self.builtin.cleanup(self.context)
    except Exception as e:
        _logger.debug("Caught exception from command: %s", e, exc_info=True)
        if self.__executing_sync:
            # Synchronous path: propagate to the caller.
            raise
        else:
            dispatcher.send('exception', self, e)
            self.output.put(self.map_fn(None))
    dispatcher.send('complete', self)
def __on_bookmarks_changed(self, *args):
    """React to an external change to the bookmarks store: reload the
    bookmark list, then notify our own listeners."""
    # Extra positional args from the upstream signal are ignored.
    self.__read_bookmarks()
    dispatcher.send(sender=self)
class Command(object):
    """Represents a complete executable object in a pipeline.

    Wraps one builtin together with its arguments, options and
    redirections, and runs it either synchronously or on a daemon
    thread, streaming results into an output queue.  (Python 2 code:
    uses `unicode` and `except Exception, e` syntax.)
    """

    def __init__(self, builtin, args, options, hotwire, tokens=None, in_redir=None, out_redir=None, out_append=False):
        super(Command, self).__init__()
        self.builtin = builtin
        self.context = CommandContext(hotwire)
        # The concept of multiple object streams is dead.
        #for schema in self.builtin.get_aux_outputs():
        #    self.context.attach_auxstream(CommandAuxStream(self, schema))
        if self.builtin.hasmeta:
            # Relay metadata emitted by the builtin as a 'metadata'
            # signal with this Command as the sender.
            def on_meta(*args):
                dispatcher.send('metadata', self, *args)
            self.context.set_metadata_handler(on_meta)
        self.input = None
        self.output = CommandQueue()
        # Identity by default; set_output_queue() may install a transform.
        self.map_fn = lambda x: x
        self.args = args
        self.context.options = options
        # Redirection paths: expand '~' and anchor to the current cwd.
        # Falsy values (None/'') pass through unchanged via `and`.
        self.in_redir = in_redir and FilePath(os.path.expanduser(in_redir), self.context.cwd)
        self.out_redir = out_redir and FilePath(os.path.expanduser(out_redir), self.context.cwd)
        self.out_append = out_append
        self.__thread = None
        self.__executing_sync = None
        self._cancelled = False
        self.__tokens = tokens

    def set_pipeline(self, pipeline):
        self.context.set_pipeline(pipeline)

    def set_input(self, input, is_first=False):
        # is_first marks this command as the head of the pipeline.
        self.input = input
        self.context.input = self.input
        self.context.input_is_first = is_first

    def set_input_type(self, in_type):
        """Note the pipeline object type used for input."""
        self.context.input_type = in_type

    def disconnect(self):
        # Drop the context; the command must not be executed afterwards.
        self.context = None

    def cancel(self):
        """Request cancellation; idempotent."""
        if self._cancelled:
            return
        self._cancelled = True
        self.context.cancelled = True
        if self.context.input:
            self.context.input.cancel()
        self.builtin.cancel(self.context)

    def get_input_opt_formats(self):
        return self.builtin.input_opt_formats

    def get_output_opt_formats(self):
        return self.builtin.output_opt_formats

    def execute(self, force_sync, **kwargs):
        """Run the builtin, synchronously when forced or when the builtin
        is not threaded; otherwise on a background daemon thread."""
        if force_sync or not self.builtin.threaded:
            _logger.debug("executing sync: %s", self)
            self.__executing_sync = True
            self.__run(**kwargs)
        else:
            _logger.debug("executing async: %s", self)
            self.__executing_sync = False
            # NOTE(review): kwargs are not forwarded to the thread target
            # here — confirm async callers never pass kwargs.
            self.__thread = threading.Thread(target=self.__run)
            self.__thread.setDaemon(True)
            self.__thread.start()

    def set_output_queue(self, queue, map_fn):
        self.output = queue
        self.map_fn = map_fn

    def get_auxstreams(self):
        for obj in self.context.get_auxstreams():
            yield obj

    def get_tokens(self):
        return self.__tokens

    def __run(self, *args, **kwargs):
        """Worker body: glob arguments, invoke the builtin, and stream
        its results to the output queue or redirect file.

        Emits 'complete' when done; on failure re-raises when running
        synchronously, otherwise emits 'exception' then 'complete'.
        """
        if self._cancelled:
            # Cancelled before starting; emit the terminator and stop.
            _logger.debug("%s cancelled, returning", self)
            self.output.put(self.map_fn(None))
            return
        try:
            # Glob expansion: quoted args pass through verbatim; a glob
            # with no matches keeps the literal argument (tracked via
            # the oldlen/newlen bookkeeping).
            matched_files = []
            oldlen = 0
            for globarg_in in self.args:
                if isinstance(globarg_in, CommandArgument) and globarg_in.isquoted:
                    globarg = globarg_in
                    newlen = oldlen
                else:
                    globarg = os.path.expanduser(globarg_in)
                    matched_files.extend(hotwire.fs.dirglob(self.context.cwd, globarg))
                    _logger.debug("glob on %s matched is: %s", globarg_in, matched_files)
                    newlen = len(matched_files)
                if oldlen == newlen:
                    # Nothing matched: keep the argument literally.
                    matched_files.append(globarg)
                    newlen += 1
                oldlen = newlen
            target_args = [matched_files]
            _logger.info("Execute '%s' args: %s options: %s", self.builtin, target_args, self.context.options)
            # NOTE(review): the incoming **kwargs is discarded and the
            # builtin's keyword arguments rebuilt from scratch — confirm
            # no caller relies on kwargs reaching the builtin.
            kwargs = {}
            if self.context.options and not self.builtin.flattened_args:
                kwargs['options'] = self.context.options
            if self.input is not None and self.input.opt_type and not self.in_redir:
                kwargs['in_opt_format'] = self.input.opt_type
            if self.output.opt_type and not self.out_redir:
                kwargs['out_opt_format'] = self.output.opt_type
            if self.in_redir:
                # Input redirection replaces the pipeline input queue.
                _logger.debug("input redirected, opening %s", self.in_redir)
                self.context.input = CommandFileQueue(open_text_file(self.in_redir, 'r'))
            if self.out_redir:
                _logger.debug("output redirected, opening %s", self.out_redir)
                outfile = open_text_file(self.out_redir, self.out_append and 'a+' or 'w')
            else:
                outfile = None
            try:
                exectarget = self.builtin.execfunc
                if self.builtin.flattened_args:
                    # Builtin wants matched files spread as positionals.
                    target_args = target_args[0]
                execresult = exectarget(self.context, *target_args, **kwargs)
                if self.builtin.singlevalue:
                    # Single-value builtins return one object, not an
                    # iterable of results.
                    if outfile:
                        outfile.write(unicode(execresult))
                    else:
                        self.output.put(execresult)
                else:
                    for result in execresult:
                        # if it has status, let it do its own cleanup
                        if self._cancelled and not self.builtin.hasstatus:
                            _logger.debug("%s cancelled, returning", self)
                            self.output.put(self.map_fn(None))
                            dispatcher.send('complete', self)
                            return
                        if outfile and (result is not None):
                            result = unicode(result)
                            outfile.write(result)
                        else:
                            self.output.put(self.map_fn(result))
            finally:
                if outfile:
                    outfile.close()
                self.builtin.cleanup(self.context)
        except Exception, e:
            _logger.debug("Caught exception from command: %s", e, exc_info=True)
            if self.__executing_sync:
                # Synchronous path: propagate to the caller.
                raise
            else:
                dispatcher.send('exception', self, e)
                self.output.put(self.map_fn(None))
        dispatcher.send('complete', self)