Example #1
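This version of singlescan() scans the given URL directly when it carries a query string; if nothing is found (and the user agrees), it falls back to crawling the target and scanning every discovered link.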
# urlparse comes from the standard library; scanner, crawler, and io are project modules
from urllib.parse import urlparse

def singlescan(url):
    """Scan a single target domain for SQL injection."""

    if urlparse(url).query != '':
        if scanner.scan([url]) != []:
            # scanner.scan print if vulnerable
            # therefore exit
            exit(0)

        else:
            print ""  # move carriage return to newline
            io.stdout("no SQL injection vulnerability found")
            option = io.stdin("do you want to crawl and continue scanning? [Y/N]", ["Y", "N"], upper=True)

            if option == 'N':
                return False

    # crawl and scan the links
    # if crawl cannot find links, do some reverse domain
    io.stdout("crawling {}".format(url))
    urls = crawler.crawl(url)

    if not urls:
        io.stdout("found no suitable urls to test SQLi")
        #io.stdout("you might want to do reverse domain")
        return False

    io.stdout("found {} urls from crawling".format(len(urls)))
    vulnerables = scanner.scan(urls)

    if not vulnerables:
        io.stdout("no SQL injection vulnerability found")
        return False

    return vulnerables
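The snippet leans on three project-local modules whose implementations are not shown. A minimal sketch of the interfaces the example assumes (the stub bodies below are illustrative, not the project's actual code):

# illustrative stubs only -- the real project supplies these modules
def scan(urls):
    """scanner.scan: probe each URL's query string, return the vulnerable ones."""
    vulnerables = []
    for url in urls:
        pass  # a real scanner would inject payloads and inspect the responses
    return vulnerables

def crawl(url):
    """crawler.crawl: return same-domain links that carry query strings."""
    return []  # a real crawler would fetch the page and extract <a href> targets

def stdout(message, end="\n"):
    """io.stdout: print a tagged status line."""
    print("[i] {}".format(message), end=end)

def stdin(prompt, choices=None, upper=False):
    """io.stdin: prompt until the answer is one of the allowed choices."""
    answer = input("[?] {} ".format(prompt))
    if upper:
        answer = answer.upper()
    while choices and answer not in choices:
        answer = input("[?] {} ".format(prompt))
        if upper:
            answer = answer.upper()
    return answer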
Example #2
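A Flask view that pages through a Woobox voting gallery, accumulates the votes from every page, and renders a ranking template. Here the crawler module exposes get_total_pages(), crawl(), and get_podium().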
from flask import render_template  # assumed import; render_template is used below

def index():
    me = {
        "name": "Cristina M.",
    }
    votes = []
    total_pages = crawler.get_total_pages("http://woobox.com/2evorj/gallery")
    # assuming get_total_pages returns a 1-indexed page count, range needs
    # total_pages + 1 so the final page is not skipped
    for page in range(1, total_pages + 1):
        url = "http://woobox.com/2evorj/context/votepage?page={}&marker=52&ajax=1".format(page)
        votes += crawler.crawl(url, me, votes)

    final_podium = crawler.get_podium(votes, me)
    return render_template('dinamic.html', me=me, ranking=final_podium)
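The view is shown without its app wiring; to serve it, it has to be registered on a Flask application. A minimal sketch, assuming a root route (the path and debug flag are not from the original):

from flask import Flask

app = Flask(__name__)
app.add_url_rule('/', 'index', index)  # assumed route; the original registration is not shown

if __name__ == '__main__':
    app.run(debug=True)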
Example #3
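A close variant of Example #1: the direct scan prints its own progress line, the Y/N prompt is validated with an explicit loop instead of a choices argument, and crawled links are handed off to massiveScan(), which is defined elsewhere in the project.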
# urlparse comes from the standard library; scanner, crawler, io, and
# massiveScan are defined elsewhere in the project
from urllib.parse import urlparse

def singleScan(url):
    """Scan a single target domain for SQL injection."""

    if urlparse(url).query != '':
        io.stdout("scanning {}".format(url), end="")

        if scanner.scan(url):
            io.showsign(" vulnerable")
            exit(0)

        else:
            print ""  # move carriage return to newline
            io.stdout("no SQL injection vulnerability found")

            option = io.stdin("do you want to crawl and continue scanning? [Y/N]").upper()
            while option not in ('Y', 'N'):
                option = io.stdin("do you want to crawl and continue scanning? [Y/N]").upper()

            if option == 'N':
                return False

    # crawl and scan the links
    # if crawl cannot find links, do some reverse domain
    io.stdout("crawling {}".format(url))
    websites = crawler.crawl(url)
    if not websites:
        io.stdout("found no suitable urls to test SQLi")
        #io.stdout("you might want to do reverse domain")
        return False

    io.stdout("found {} urls from crawling".format(len(websites)))
    vulnerables = massiveScan(websites)

    if not vulnerables:
        io.stdout("no SQL injection vulnerability found")
        return False

    return vulnerables
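massiveScan() itself is not part of the snippet. Judging from how it is called and from the per-URL branch above, a hedged sketch of its shape might be:

def massiveScan(urls):
    """Illustrative sketch: scan each URL and collect the vulnerable ones."""
    vulnerables = []
    for url in urls:
        io.stdout("scanning {}".format(url), end="")
        if scanner.scan(url):
            io.showsign(" vulnerable")
            vulnerables.append(url)
        else:
            print("")  # move carriage return to newline
    return vulnerables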
Example #4
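A small entry-point script that crawls a subreddit into a corpus, runs text analysis over it, and logs the most popular words.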
import logging

from src import crawler, text_analysis
from src.utils import init_logging

init_logging()

if __name__ == '__main__':
    corpus = crawler.crawl(type_='Subreddit', name='financialindependence')
    corpus_meta = text_analysis.CorpusMetadata(corpus)
    corpus_meta.analyze()

    logging.info('### Report ###')
    logging.info('Most popular words:')
    logging.info(corpus_meta.top_words())
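init_logging() comes from the project's src.utils module and is not shown. A minimal stand-in, assuming it only configures the root logger:

import logging

def init_logging():
    """Assumed behavior: route INFO-and-above records to stderr with timestamps."""
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s %(levelname)s %(message)s',
    )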