Ejemplo n.º 1
0
def test_detection():
    """The exec module must flag a command execution when fake env output appears."""
    # Payloads containing "env" get simulated command output; anything else a bland page.
    responses.add(responses.GET,
                  re.compile(r"http://perdu.com/\?vuln=.*env.*"),
                  body="PATH=/bin:/usr/bin;PWD=/")
    responses.add(responses.GET,
                  re.compile(r"http://perdu.com/\?vuln=.*"),
                  body="Hello there")

    persister = FakePersister()

    target = Request("http://perdu.com/?vuln=hello")
    target.path_id = 1
    persister.requests.append(target)

    crawler = Crawler("http://perdu.com/", timeout=1)
    module = mod_exec(crawler, persister, Mock(), {"timeout": 10, "level": 1})
    module.verbose = 2

    # Drain the attack generator to completion
    for __ in module.attack():
        pass

    # A vulnerability must have been recorded on the "vuln" parameter with an env payload
    assert persister.vulnerabilities
    assert persister.vulnerabilities[0][0] == "vuln"
    assert "env" in persister.vulnerabilities[0][1]
Ejemplo n.º 2
0
def test_whole_stuff():
    """Attack every parameter flavour (bare URL, query, POST + file) without crashing."""
    responses.add(responses.GET,
                  re.compile(r"http://perdu.com/"),
                  body="Hello there")

    persister = FakePersister()

    # One bare URL, one query-string request, one POST with a file parameter
    targets = [
        Request("http://perdu.com/"),
        Request("http://perdu.com/?foo=bar"),
        Request(
            "http://perdu.com/?foo=bar",
            post_params=[["a", "b"]],
            file_params=[["file", ["calendar.xml", "<xml>Hello there</xml"]]],
        ),
    ]
    for path_id, target in enumerate(targets, start=1):
        target.path_id = path_id
        persister.requests.append(target)

    crawler = Crawler("http://perdu.com/", timeout=1)
    module = mod_exec(crawler, persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    module.do_post = True

    for __ in module.attack():
        pass

    # Reaching this point without an exception is the whole point of the test
    assert True
Ejemplo n.º 3
0
async def test_detection():
    """Async variant: the module must log exactly one command-execution finding."""
    # "env" payloads receive simulated command output, everything else a bland page
    respx.get(url__regex=r"http://perdu\.com/\?vuln=.*env.*").mock(
        return_value=httpx.Response(200, text="PATH=/bin:/usr/bin;PWD=/"))
    respx.get(url__regex=r"http://perdu\.com/\?vuln=.*").mock(
        return_value=httpx.Response(200, text="Hello there"))

    persister = AsyncMock()

    target = Request("http://perdu.com/?vuln=hello")
    target.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_exec(crawler, persister, Mock(), {"timeout": 10, "level": 1}, Event())
    module.verbose = 2
    await module.attack(target)

    # Exactly one finding, tagged as command execution on the "vuln" parameter
    assert persister.add_payload.call_count == 1
    reported = persister.add_payload.call_args_list[0][1]
    assert reported["module"] == "exec"
    assert reported["category"] == _("Command execution")
    assert reported["request"].get_params == [["vuln", ";env;"]]
    await crawler.close()
Ejemplo n.º 4
0
async def test_detection():
    """Same async detection scenario, but recorded through the FakePersister stub."""
    respx.get(url__regex=r"http://perdu\.com/\?vuln=.*env.*").mock(
        return_value=httpx.Response(200, text="PATH=/bin:/usr/bin;PWD=/"))
    respx.get(url__regex=r"http://perdu\.com/\?vuln=.*").mock(
        return_value=httpx.Response(200, text="Hello there"))

    persister = FakePersister()

    target = Request("http://perdu.com/?vuln=hello")
    target.path_id = 1
    persister.requests.append(target)

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_exec(crawler, persister, Mock(), {"timeout": 10, "level": 1}, Event())
    module.verbose = 2
    await module.attack(target)

    # The stub must have captured a finding on "vuln" carrying an env payload
    assert persister.vulnerabilities
    assert persister.vulnerabilities[0][0] == "vuln"
    assert "env" in persister.vulnerabilities[0][1]
    await crawler.close()
Ejemplo n.º 5
0
def test_blind_detection():
    """Detect a blind (time-based) command execution via simulated timeouts.

    Any request whose URL contains "sleep" raises ReadTimeout, as if the
    injected command had actually delayed the server's response; the module
    must interpret that delay as a blind command execution.
    """
    def timeout_callback(http_request):
        if "sleep" in http_request.url:
            raise ReadTimeout("Read timed out")
        return 200, {}, "Hello there"

    responses.add_callback(responses.GET,
                           re.compile(r"http://perdu.com/\?vuln=.*"),
                           callback=timeout_callback)

    persister = FakePersister()

    request = Request("http://perdu.com/?vuln=hello")
    request.path_id = 2
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com/", timeout=1)
    options = {"timeout": 1, "level": 1}
    logger = Mock()

    module = mod_exec(crawler, persister, logger, options)
    module.verbose = 2
    module.do_post = False

    # Count how many payloads are attempted before the first time-based one.
    # (The flags half of each payload tuple is unused here — name it "__" for
    # consistency with the async version of this test.)
    payloads_until_sleep = 0
    for payload, __ in module.payloads:
        if "sleep" in payload:
            break
        payloads_until_sleep += 1

    for __ in module.attack():
        pass

    assert persister.vulnerabilities
    assert persister.vulnerabilities[0][0] == "vuln"
    assert "sleep" in persister.vulnerabilities[0][1]
    # We should have all payloads till "sleep" ones
    # then 3 requests for the sleep payload (first then two retries to check random lags)
    # then 1 request to check state of original request
    assert len(responses.calls) == payloads_until_sleep + 3 + 1
Ejemplo n.º 6
0
async def test_whole_stuff():
    """Async: attack every parameter flavour (query, POST, file upload) without crashing."""
    respx.get(url__regex=r"http://perdu\.com/.*").mock(
        httpx.Response(200, text="Hello there"))
    respx.post(url__regex=r"http://perdu\.com/.*").mock(
        httpx.Response(200, text="Hello there"))

    persister = AsyncMock()

    # One bare URL, one query-string request, one POST carrying a file parameter
    all_requests = [
        Request("http://perdu.com/"),
        Request("http://perdu.com/?foo=bar"),
        Request("http://perdu.com/?foo=bar",
                post_params=[["a", "b"]],
                file_params=[[
                    "file",
                    ("calendar.xml", "<xml>Hello there</xml",
                     "application/xml")
                ]]),
    ]
    for path_id, target in enumerate(all_requests, start=1):
        target.path_id = path_id

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_exec(crawler, persister, Mock(), {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_post = True
    for target in all_requests:
        await module.attack(target)

    # Surviving every request without an exception is the success criterion
    assert True
    await crawler.close()
Ejemplo n.º 7
0
async def test_blind_detection():
    """Async blind (time-based) detection: sleep payloads trigger a ReadTimeout."""
    def fake_timeout(http_request):
        # Pretend the injected sleep command actually delayed the server
        if "sleep" in str(http_request.url):
            raise httpx.ReadTimeout("Read timed out", request=http_request)
        return httpx.Response(200, text="Hello there")

    respx.get(url__regex=r"http://perdu.com/\?vuln=.*").mock(side_effect=fake_timeout)

    persister = AsyncMock()

    target = Request("http://perdu.com/?vuln=hello")
    target.path_id = 2

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_exec(crawler, persister, Mock(), {"timeout": 1, "level": 1}, Event())
    module.verbose = 2
    module.do_post = False

    # How many payloads come before the first time-based ("sleep") one
    payloads_until_sleep = 0
    for payload, __ in module.payloads:
        if "sleep" in payload:
            break
        payloads_until_sleep += 1

    await module.attack(target)

    assert persister.add_payload.call_count == 1
    assert persister.add_payload.call_args_list[0][1][
        "request"].get_params == [['vuln', 'a`sleep 60`']]
    # We should have all payloads till "sleep" ones
    # then 3 requests for the sleep payload (first then two retries to check random lags)
    # then 1 request to check state of original request
    assert respx.calls.call_count == payloads_until_sleep + 3 + 1
    await crawler.close()