def testNotifications(self):
    """Subscribe a listener while pubsub output is captured in a buffer."""
    sink = StringIO()
    logger = notification.useNotifyByWriteFile(sink)

    def register():
        def listener():
            pass
        self.pub.subscribe(listener, 'testNotifications')

    register()
Example #2 (score: 0)
    def test1(self):
        """Check that printTreeDocs renders the topic tree as expected."""
        topicMgr = self.pub.getDefaultTopicMgr()

        root = topicMgr.getOrCreateTopic('a2')
        # Populate the tree with two branches of two leaves each.
        for name in ('a2.a.a', 'a2.a.b', 'a2.b.a', 'a2.b.b'):
            topicMgr.getOrCreateTopic(name)

        from wx.lib.six import StringIO
        out = StringIO()
        printTreeDocs(rootTopic=root, width=70, fileObj=out)
        self.assertEqual(out.getvalue(), self.expectedOutput)
    def test1(self):
        """Print the topic tree and compare it to the expected rendering."""
        mgr = self.pub.getDefaultTopicMgr()
        root = mgr.getOrCreateTopic('a2')
        mgr.getOrCreateTopic('a2.a.a')
        mgr.getOrCreateTopic('a2.a.b')
        mgr.getOrCreateTopic('a2.b.a')
        mgr.getOrCreateTopic('a2.b.b')

        from wx.lib.six import StringIO
        captured = StringIO()
        printTreeDocs(rootTopic=root, width=70, fileObj=captured)
        self.assertEqual(captured.getvalue(), self.expectedOutput)
Example #4 (score: 0)
def getTokens(command):
    """Return a list of token tuples for *command*.

    Each tuple looks like (type, string, (srow, scol), (erow, ecol), line).
    Tokenization stops quietly on tokenize.TokenError (premature EOF),
    which is expected since callers feed in fragments of Python code.
    """
    # Python 2 only: try to encode a unicode command, since tokenize there
    # works on byte strings.  On Python 3 both StringIO and
    # tokenize.generate_tokens require str, so the command is left alone
    # (the original unconditional encode made StringIO(command) raise
    # TypeError on Python 3).
    if not PY3 and isinstance(command, str):
        try:
            command = command.encode('utf-8')
        except UnicodeEncodeError:
            pass  # otherwise leave it alone

    f = StringIO(command)
    # tokens is a list of token tuples, each looking like:
    # (type, string, (srow, scol), (erow, ecol), line)
    tokens = []
    # Can't tokenize in one shot: we need to keep as many tokens as
    # possible before a TokenError aborts the scan.
    try:
        if not PY3:
            # Python 2.1.3-compatible path: feed a callback to
            # tokenize_loop instead of iterating a generator.
            def eater(*args):
                tokens.append(args)

            tokenize.tokenize_loop(f.readline, eater)
        else:
            # BUG FIX: the original called tokenize.tokenize(f.readline)
            # and discarded the resulting generator, so tokens stayed
            # empty (and tokenize.tokenize expects a *bytes* readline).
            # generate_tokens works on str and must be iterated.
            for token in tokenize.generate_tokens(f.readline):
                tokens.append(token)
    except tokenize.TokenError:
        # This is due to a premature EOF, which we expect since we are
        # feeding in fragments of Python code.
        pass
    return tokens
Example #5 (score: 0)
 def captureStdout():
     """Redirect pubsub notification output into a StringIO and return it."""
     from wx.lib.six import StringIO
     buf = StringIO()
     useNotifyByWriteFile(fileObj=buf)
     return buf
Example #6 (score: 0)
def getPyImage(shellName='PyCrust'):
    """Return a wx.Image built from the embedded image data for shellName."""
    data = getPyData(shellName)
    return wx.Image(StringIO(data))