Example #1
def main(args):
    searcher = Searcher(args["limit"])
    workers = []

    if "domains" in args:
        # load domains from file
        domains = load_domains(args["domains"])

        for domain in domains:
            if domain == "":
                continue

            # lookup in search engine
            result = searcher.google_search(domain)

            # start the worker
            worker = Worker(domain, result.urls, result.page_source)
            workers.append(worker)

        print("\nNow waiting for workers to finish")

    else:
        # lookup in search engine
        result = searcher.google_search(args["domain"])

        # start the worker
        worker = Worker(args["domain"], result.urls, result.page_source)
        workers.append(worker)

    searcher.close()

    # wait for all workers to finish
    for worker in workers:
        worker.wait()

    # write the collected emails to an Excel file
    if "output" in args:
        write_excel_file(args["output"], workers)

    print("\nFinished scraping!\n")

    # output all emails
    for worker in workers:
        for email in worker.emails:
            print("> " + email)
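
# A minimal usage sketch, not part of the original example: it assumes the
# Searcher, Worker, load_domains and write_excel_file helpers from the
# surrounding project, and the args keys and example values below are only
# inferred from how main() reads them above.
if __name__ == "__main__":
    example_args = {
        "domain": "example.com",   # single-domain mode; pass "domains" instead to read a file of domains
        "limit": 100,              # handed to Searcher as the search-result limit
        "output": "emails.xlsx",   # optional report written by write_excel_file
    }
    main(example_args)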