Example 1
async def test_implies_detection():
    # Test for implied applications
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            </body></html>",
        headers={"X-Generator": "Backdrop CMS 4.5"}))

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count == 3
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}')
    assert persister.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": [], "name": "PHP", "categories": ["Programming languages"]}'
    )
    await crawler.close()
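Note: each example in this collection is a test-function body only. To run the async, respx-based ones you also need the module-level imports and the pytest/respx decorators. The sketch below assumes pytest-asyncio is installed and that the wapitiCore import paths match the layout implied by these snippets; both are assumptions, so adjust them to the wapiti version under test.

import os
from asyncio import Event
from unittest.mock import AsyncMock, Mock

import httpx
import pytest   # pytest-asyncio assumed: it provides the asyncio marker used below
import respx

# Import paths are assumptions based on the wapiti-scanner layout.
from wapitiCore.attack.mod_wapp import mod_wapp
from wapitiCore.net.crawler import AsyncCrawler
from wapitiCore.net.web import Request

# The "_" seen in a few assertions is the project's gettext/translation helper;
# its import is omitted here because the exact path varies between versions.

# Each async example is assumed to be wrapped like this:
@pytest.mark.asyncio
@respx.mock
async def test_some_detection():   # hypothetical placeholder name
    ...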
Example 2
async def test_cookies_detection():
    # Test if application is detected using its cookies regex
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            </body></html>",
        headers={"Set-Cookie": "ci_csrf_token=4.1"}))

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["2+"], "name": "CodeIgniter", "categories": ["Web frameworks"]}'
    )
    await crawler.close()
Example 3
async def test_url_detection():
    # Test if application is detected using its url regex
    respx.get(
        "http://perdu.com/owa/auth/logon.aspx"
    ).mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong></body></html>"
    ))

    persister = FakePersister()

    request = Request("http://perdu.com/owa/auth/logon.aspx")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.additionals
    assert persister.additionals[
        2] == '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"]}'
    await crawler.close()
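Several examples use a FakePersister test double instead of AsyncMock. Its definition is not part of these excerpts; the minimal sketch below is inferred purely from how the assertions use it (requests, additionals holding JSON info strings, vulnerabilities holding info/category pairs) and is not the actual helper from the wapiti test suite.

class FakePersister:
    # Hypothetical stand-in reconstructed from the assertions in these examples.
    def __init__(self):
        self.requests = []         # sync examples append the crawled Request here
        self.additionals = []      # JSON strings describing detected technologies
        self.vulnerabilities = []  # (info, category) pairs, see test_vulnerabilities

    def add_additional(self, request_id=-1, category=None, level=0, request=None, parameter="", info=""):
        self.additionals.append(info)

    def add_vulnerability(self, request_id=-1, category=None, level=0, request=None, parameter="", info=""):
        self.vulnerabilities.append((info, category))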
Example 4
def test_url_detection():
    # Test if application is detected using its url regex
    responses.add(
        responses.GET,
        url="http://perdu.com/owa/auth/logon.aspx",
        body=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong></body></html>"
    )

    persister = FakePersister()

    request = Request("http://perdu.com/owa/auth/logon.aspx")
    request.path_id = 1
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options)
    module.verbose = 2

    for __ in module.attack():
        pass

    assert persister.additionals
    assert persister.additionals[
        2] == '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"]}'
Example 5
async def test_multi_detection():
    # Test if application is detected using several ways
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title> \
            <meta name=\"generator\" content=\"WordPress 5.6.1\">    \
            </head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            <script type=\"text/javascript\" src=\"https://perdu.com/wp-includes/js/wp-embed.min.js\" ></script> \
            </body></html>",
        headers={
            "link": "<http://perdu.com/wp-json/>; rel=\"https://api.w.org/\""
        }))

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.additionals
    assert persister.additionals[
        -1] == '{"versions": ["5.6.1"], "name": "WordPress", "categories": ["CMS", "Blogs"]}'
    await crawler.close()
Example 6
async def test_meta_detection():
    # Test if application is detected using its meta regex
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title> \
            <meta name=\"generator\" content=\"Planet/1.6.2\">    \
            </head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            </body></html>"))

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.additionals
    assert persister.additionals[
        0] == '{"versions": ["1.6.2"], "name": "Planet", "categories": ["Feed readers"]}'
    await crawler.close()
Example 7
async def test_cookies_detection():
    # Test if application is detected using its cookies regex
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            </body></html>",
        headers={"Set-Cookie": "ci_csrf_token=4.1"}))

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.additionals
    assert persister.additionals[
        0] == '{"versions": ["2+"], "name": "CodeIgniter", "categories": ["Web frameworks"]}'
    await crawler.close()
Example 8
async def test_script_detection():
    # Test if application is detected using its script regex
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            <script src=\"http://chartjs.org/dist/1.4.2/Chart.js\"></script>\
            </body></html>"))

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.additionals
    assert persister.additionals[0] == \
           '{"versions": ["1.4.2"], "name": "Chart.js", "categories": ["JavaScript graphics"]}'
    await crawler.close()
Example 9
async def test_implies_detection():
    # Test for implied applications
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            </body></html>",
        headers={"X-Generator": "Backdrop CMS 4.5"}))

    persister = FakePersister()

    request = Request("http://perdu.com")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.additionals
    assert persister.additionals[
        0] == '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}'
    assert persister.additionals[
        1] == '{"versions": [], "name": "PHP", "categories": ["Programming languages"]}'
    await crawler.close()
Example 10
async def test_meta_detection():
    # Test if application is detected using its meta regex
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title> \
            <meta name=\"generator\" content=\"Planet/1.6.2\">    \
            </head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            </body></html>"
        )
    )

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = mod_wapp(crawler, persister, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["1.6.2"], "name": "Planet", "categories": ["Feed readers"]}'
    )
    await crawler.close()
Example 11
def test_html_detection():
    # Test if application is detected using its html regex
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
        <title>Powered by <a href=\"http://atlassian.com/software/confluence\">Atlassian Confluence</a> 2.8.4</p> \
        </body></html>")

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options)
    module.verbose = 2

    for __ in module.attack():
        pass

    assert persister.additionals
    assert persister.additionals[0] == "Atlassian Confluence 2.8.4"
Example 12
def test_false_positive():
    # Test for false positive
    responses.add_passthru(
        "https://raw.githubusercontent.com/wapiti-scanner/wappalyzer/master/src/technologies.json"
    )

    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong></body></html>"
    )

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options)
    module.verbose = 2

    for __ in module.attack():
        pass

    assert not persister.additionals
Example 13
def test_meta_detection():
    # Test if application is detected using its meta regex
    responses.add(responses.GET,
                  url="http://perdu.com/",
                  body="<html><head><title>Vous Etes Perdu ?</title> \
        <meta name=\"generator\" content=\"Planet/1.6.2\">    \
        </head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
        </body></html>")

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options)
    module.verbose = 2

    for __ in module.attack():
        pass

    assert persister.additionals
    assert persister.additionals[
        0] == '{"versions": ["1.6.2"], "name": "Planet", "categories": ["Feed readers"]}'
Example 14
def test_implies_detection():
    # Test for implied applications
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
        </body></html>",
        headers={"X-Generator": "Backdrop CMS 4.5"})

    persister = FakePersister()

    request = Request("http://perdu.com")
    request.path_id = 1
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options)
    module.verbose = 2

    for __ in module.attack():
        pass

    assert persister.additionals
    assert '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}' == persister.additionals[
        0]
    assert '{"versions": [], "name": "PHP", "categories": ["Programming languages"]}' == persister.additionals[
        1]
Example 15
def test_script_detection():
    # Test if application is detected using its script regex
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
        <script src=\"http://chartjs.org/dist/1.4.2/Chart.js\"></script>\
        </body></html>")

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options)
    module.verbose = 2

    for __ in module.attack():
        pass

    assert persister.additionals
    assert persister.additionals[
        0] == '{"versions": ["1.4.2"], "name": "Chart.js", "categories": ["JavaScript graphics"]}'
Example 16
def test_cookies_detection():
    # Test if application is detected using its cookies regex
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
        </body></html>",
        headers={"Set-Cookie": "ci_csrf_token=4.1"})

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options)
    module.verbose = 2

    for __ in module.attack():
        pass

    assert persister.additionals
    assert persister.additionals[
        0] == '{"versions": ["2+"], "name": "CodeIgniter", "categories": ["Web frameworks"]}'
Example 17
async def test_html_detection():
    # Test if application is detected using its html regex
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>FishEye 2.8.4</title> \
            </head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            </body></html>"
        )
    )

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = mod_wapp(crawler, persister, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["2.8.4"], "name": "Atlassian FishEye", "categories": ["Development"]}'
    )
    await crawler.close()
Example 18
def test_html_detection():
    # Test if application is detected using its html regex
    responses.add(responses.GET,
                  url="http://perdu.com/",
                  body="<html><head><title>FishEye 2.8.4</title> \
        </head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        </body></html>")

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options)
    module.verbose = 2

    for __ in module.attack():
        pass

    assert persister.additionals
    assert persister.additionals[
        0] == '{"versions": ["2.8.4"], "name": "Atlassian FishEye", "categories": ["Development"]}'
Example 19
async def test_html_detection():
    # Test if application is detected using its html regex
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>FishEye 2.8.4</title> \
            </head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            </body></html>"))

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.additionals
    assert persister.additionals[0] == \
           '{"versions": ["2.8.4"], "name": "Atlassian FishEye", "categories": ["Development"]}'
    await crawler.close()
Example 20
async def test_url_detection():
    # Test if application is detected using its url regex
    respx.get("http://perdu.com/owa/auth/logon.aspx").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong></body></html>"
        )
    )

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/owa/auth/logon.aspx")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = mod_wapp(crawler, persister, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["module"] == "wapp"
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Fingerprint web technology")
    assert persister.add_payload.call_args_list[2][1]["info"] == (
        '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"]}'
    )
    await crawler.close()
Example 21
async def test_false_positive():
    # Test for false positive
    respx.route(host="raw.githubusercontent.com").pass_through()

    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong></body></html>"
        )
    )

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = mod_wapp(crawler, persister, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert not persister.add_payload.call_count
    await crawler.close()
Example 22
def test_headers_detection():
    # Test if application is detected using its headers regex
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
        </body></html>",
        headers={"Server": "Cherokee/1.3.4"})

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options)
    module.verbose = 2

    for __ in module.attack():
        pass

    assert persister.additionals
    assert persister.additionals[0] == "Cherokee 1.3.4"
Example 23
async def test_false_positive():
    # Test for false positive
    respx.route(host="raw.githubusercontent.com").pass_through()

    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong></body></html>"
    ))

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert not persister.additionals
    await crawler.close()
Example 24
async def test_vulnerabilities():
    # Test for vulnerabilities detected
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            </body></html>",
        headers={
            "X-Generator": "Backdrop CMS 4.5",
            "Server": "Cherokee/1.3.4"
        }))

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count == 5
    # First one is an additional
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}')
    assert persister.add_payload.call_args_list[0][1]["category"] == _(
        "Fingerprint web technology")

    assert persister.add_payload.call_args_list[3][1]["info"] == (
        '{"versions": ["1.3.4"], "name": "Cherokee", "categories": ["Web servers"]}'
    )
    assert persister.add_payload.call_args_list[3][1]["category"] == _(
        'Fingerprint web server')
    await crawler.close()
Example 25
async def test_multi_detection():
    # Test if application is detected using several ways
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title> \
            <meta name=\"generator\" content=\"WordPress 5.6.1\">    \
            </head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            <script type=\"text/javascript\" src=\"https://perdu.com/wp-includes/js/wp-embed.min.js\" ></script> \
            </body></html>",
            headers={"link": "<http://perdu.com/wp-json/>; rel=\"https://api.w.org/\""}
        )
    )

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = mod_wapp(crawler, persister, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": ["5.6.1"], "name": "WordPress", "categories": ["CMS", "Blogs"]}'
    )
    await crawler.close()
Example 26
async def test_vulnerabilities():
    # Test for vulnerabilities detected
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    * <----- vous &ecirc;tes ici</pre></strong> \
            </body></html>",
        headers={
            "X-Generator": "Backdrop CMS 4.5",
            "Server": "Cherokee/1.3.4"
        }))

    persister = FakePersister()

    request = Request("http://perdu.com")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.vulnerabilities
    assert persister.vulnerabilities[0][
        0] == '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}'
    assert persister.vulnerabilities[0][
        1] == 'Fingerprint web application framework'
    assert persister.vulnerabilities[1][0] == \
           '{"versions": ["1.3.4"], "name": "Cherokee", "categories": ["Web servers"]}'
    assert persister.vulnerabilities[1][1] == 'Fingerprint web server'
    await crawler.close()