Esempio n. 1
0
class RubyTest(unittest.TestCase):
    """Verify that HttpLexer hands XML message bodies off to an XML lexer."""

    def setUp(self):
        self.maxDiff = None
        self.lexer = HttpLexer()

    def _check_body_tokens(self, fragment, expected):
        # Only the trailing tokens (the message body) are compared; the
        # request-line and header tokens produced before them are ignored.
        produced = list(self.lexer.get_tokens(fragment))
        self.assertEqual(expected, produced[-len(expected):])

    def testApplicationXml(self):
        self._check_body_tokens(
            u'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n',
            [
                (Token.Name.Tag, u'<foo'),
                (Token.Name.Tag, u'>'),
                (Token.Text, u'\n'),
            ])

    def testApplicationCalendarXml(self):
        self._check_body_tokens(
            u'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n<foo>\n',
            [
                (Token.Name.Tag, u'<foo'),
                (Token.Name.Tag, u'>'),
                (Token.Text, u'\n'),
            ])
Esempio n. 2
0
    def __init__(self, collection_file, env_file, autosave=False,
                 style='fruity'):
        """Build the requestor and the highlighting machinery.

        ``style`` names the Pygments color scheme used by the formatter.
        """
        # Requestor performs the actual HTTP work for this session.
        self.r = Requestor(collection_file, env_file)
        self.autosave = autosave

        # Formatter plus one lexer per payload flavor we pretty-print.
        self.formatter = Terminal256Formatter(style=style)
        self.http_lexer = HttpLexer()
        self.json_lexer = JsonLexer()
        self.python_lexer = Python3Lexer()
Esempio n. 3
0
def cli_profile(pids, process_names, color, request, response):
    """
    Sniff using the CFNetworkDiagnostics.mobileconfig profile.

    This requires the specific Apple profile to be installed for the sniff to work.
    """
    lockdown = LockdownClient()

    for entry in OsTraceService(lockdown).syslog():
        label = entry.label
        # Only CFNetwork diagnostics syslog entries are relevant.
        if label is None:
            continue
        if label.subsystem != 'com.apple.CFNetwork' or label.category != 'Diagnostics':
            continue
        if pids and entry.pid not in pids:
            continue
        if process_names and posixpath.basename(entry.filename) not in process_names:
            continue

        message_lines = entry.message.split('\n')
        if len(message_lines) < 2:
            continue

        marker = message_lines[1].strip()
        out = ''

        if request and marker.startswith('Protocol Enqueue: request'):
            # Outgoing request.
            print('➡️   ', end='')
            fields = parse_fields(entry.message)
            out += f'{fields["Message"]}\n'
            for name, value in fields.items():
                if name not in ('Protocol Enqueue', 'Request', 'Message'):
                    out += f'{name}: {value}\n'

        elif response and marker.startswith('Protocol Received: request'):
            # Incoming response.
            print('⬅️   ', end='')
            fields = parse_fields(entry.message)
            out += f'{fields["Response"]} ({fields["Protocol Received"]})\n'
            for name, value in fields.items():
                if name not in ('Protocol Received', 'Response'):
                    out += f'{name}: {value}\n'

        if not out:
            continue
        if color:
            print(
                highlight(out, HttpLexer(),
                          TerminalTrueColorFormatter(style='autumn')))
        else:
            print(out)
Esempio n. 4
0
class RubyTest(unittest.TestCase):
    """Regression tests: XML bodies of HTTP messages get XML tag tokens."""

    def setUp(self):
        self.lexer = HttpLexer()
        # Show full diffs when an assertion fails.
        self.maxDiff = None

    def testApplicationXml(self):
        fragment = u'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n'
        expected = [
            (Token.Name.Tag, u'<foo'),
            (Token.Name.Tag, u'>'),
            (Token.Text, u'\n'),
        ]
        actual = list(self.lexer.get_tokens(fragment))
        self.assertEqual(expected, actual[-len(expected):])

    def testApplicationCalendarXml(self):
        fragment = u'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n<foo>\n'
        expected = [
            (Token.Name.Tag, u'<foo'),
            (Token.Name.Tag, u'>'),
            (Token.Text, u'\n'),
        ]
        actual = list(self.lexer.get_tokens(fragment))
        self.assertEqual(expected, actual[-len(expected):])
Esempio n. 5
0
class HybridHttpLexer(Lexer):
    """Lex an HTTP message: headers with HttpLexer, body with a lexer
    picked from the Content-Type header (plain text as a fallback).

    Bodies longer than ``max_len`` characters are emitted as plain text
    to keep highlighting cheap; a negative ``max_len`` disables the cap.
    """

    tl = TextLexer()
    hl = HttpLexer()

    def __init__(self, max_len=50000, *args, **kwargs):
        # max_len: largest body (in characters) that still gets
        # content-type-specific highlighting; negative means "no limit".
        self.max_len = max_len
        Lexer.__init__(self, *args, **kwargs)

    def get_tokens_unprocessed(self, text):
        try:
            # Split headers from body at the first blank line.
            # maxsplit is passed by keyword: positional use is deprecated
            # (Python 3.13+ warns on it).
            split = re.split(r"(?:\r\n|\n)(?:\r\n|\n)", text, maxsplit=1)
            h = split[0]
            body = split[1] if len(split) == 2 else ''
        except Exception:
            # Defensive fallback: emit everything as plain text, then
            # re-raise with the original traceback (bare raise, not
            # ``raise e``, so the traceback is not re-rooted here).
            for v in self.tl.get_tokens_unprocessed(text):
                yield v
            raise

        for token in self.hl.get_tokens_unprocessed(h):
            yield token

        if len(body) == 0:
            return

        if 0 <= self.max_len < len(body):
            # Body too large to be worth syntax highlighting.
            yield (len(h), Token.Text, text[len(h):])
            return

        second_parser = None
        if "Content-Type" in h:
            ct = re.search("Content-Type: (.*)", h)
            if ct is not None:
                hval = ct.groups()[0]
                # Strip parameters such as "; charset=utf-8".
                mime = hval.split(";")[0]
                try:
                    second_parser = get_lexer_for_mimetype(mime)
                except ClassNotFound:
                    pass

        if second_parser is None:
            # The len(h) offset deliberately re-covers the blank-line
            # separator so every character of ``text`` is tokenized.
            yield (len(h), Token.Text, text[len(h):])
        else:
            for index, tokentype, value in second_parser.get_tokens_unprocessed(
                    text[len(h):]):
                yield (index + len(h), tokentype, value)
Esempio n. 6
0
def show_http_packet(http_packet, filter_headers):
    """Pretty-print a HAR-style HTTP packet to stdout.

    Requests are detected by the presence of a 'url' key; anything else
    is treated as a response. When ``filter_headers`` is a non-empty
    list, only the named headers (case-insensitive) are shown.
    """
    version = 'HTTP/2.0' if http_packet['httpVersion'] == 'h2' else 'HTTP/1.0'

    parts = []
    if 'url' in http_packet:
        # Request line.
        url = urlparse(http_packet['url'])
        uri = url.path
        if url.query:
            uri += f'?{url.query}'

        parts.append(f'{http_packet["method"]} {uri} {version}\r\n')
    else:
        # Status line: a zero status marks a placeholder, not a real packet.
        if http_packet['status'] == 0:
            return
        parts.append(f'{version} {http_packet["status"]} {http_packet["statusText"]}\r\n')

    keep_all = (filter_headers is None) or (len(filter_headers) == 0)
    for header in http_packet['headers']:
        if keep_all or is_in_insensitive_list(header['name'], filter_headers):
            parts.append(f'{header["name"]}: {header["value"]}\r\n')

    parts.append('\r\n')

    print(
        highlight(''.join(parts), HttpLexer(),
                  TerminalTrueColorFormatter(style='autumn')))

    # 'content' (responses) takes precedence over 'postData' (requests),
    # matching the original assignment order.
    content = http_packet.get('content', http_packet.get('postData', {}))
    if 'text' in content:
        print(content['text'])
Esempio n. 7
0
def lexer():
    # NOTE(review): yields (rather than returns) a fresh HttpLexer —
    # presumably a pytest fixture; the decorator is not visible here,
    # so confirm against the surrounding file before relying on that.
    yield HttpLexer()
Esempio n. 8
0
 def setUp(self):
     """Create a fresh lexer under test and disable diff truncation."""
     self.maxDiff = None
     self.lexer = HttpLexer()
Esempio n. 9
0
 def setUp(self):
     # Fresh lexer per test; maxDiff=None keeps failure diffs untruncated.
     self.maxDiff = None
     self.lexer = HttpLexer()
Esempio n. 10
0
except ImportError:
    import warnings
    warnings.warn('Pygments not found, highlighting unavailable.')

    http_lexer, formatter = None, None

    def _highlight(raw: Union[str, bytes], http_lexer: None,
                   formatter: None) -> str:
        return raw.decode('utf-8') if isinstance(raw, bytes) else raw

    def colorize(tokens: Union[tuple, list]) -> str:
        """Fallback: concatenate token text, discarding the token types."""
        pieces = [value for ttype, value in tokens]
        return ''.join(pieces)

else:
    http_lexer = HttpLexer(stripnl=False, ensurenl=False, encoding='utf-8')
    formatter = Terminal256Formatter()

    def colorize(tokens: Union[tuple, list]) -> str:
        """Render *tokens* to a string via the module-level formatter."""
        buf = StringIO()
        try:
            formatter.format(tokens, buf)
            return buf.getvalue()
        finally:
            buf.close()


def highlight(raw: str) -> str:
    """Highlight *raw* and make a trailing newline visible as '⏎'.

    Delegates to the module-level ``_highlight`` (Pygments-backed or the
    plain-text fallback, whichever was installed at import time).
    """
    highlighted = _highlight(raw, http_lexer, formatter)
    # endswith() also covers the empty-string case, where the original
    # highlighted[-1] indexing would raise IndexError.
    if highlighted.endswith('\n'):
        highlighted = highlighted[:-1] + '⏎\n'

    return highlighted