    def insertSuccess(self, msg):
        """
        The article in msg was crawled successfully: save the document to MongoDB
        and record its URL in the successed_url table.
        """
        try:
            self.mysql_client.begin()
#             print article
#             print msg["url"]
            
            article = self.mysql_client.getOne("select * from failed_url where url=%s", (msg["url"], ))
            if article != False:
                article = self.mysql_client.delete("delete from failed_url where url=%s", (msg["url"], ))
                LOGGER.info("delete the article from failed_url: %s", msg["url"])

            article = self.mysql_client.getOne("select * from successed_url where url=%s", (msg["url"], ))
            if article != False:
                LOGGER.info("repeat crawler the article give up save: %s", msg["url"])
                return
            
            self.mongo_client.save(msg)
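            # the full article document goes to MongoDB; successed_url in MySQL only records that this URL was processed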
            LOGGER.debug("insert into mongo: %s@%s" %(msg["title"], msg["url"]))
            
            self.mysql_client.insertOne("insert into successed_url(url, tag, sub_tag, version, create_time) values(%s, %s, %s, %s, %s)",  \
                                        (msg["url"], msg["tag"], msg["sub_tag"], VERSION, msg["create_time"]));
                                        
            LOGGER.debug("insert successed_url %s" %(msg["url"], ))
            self.mysql_client.end("commit")

        except Exception as e:
            self.mysql_client.end("rollback")

            self.mysql_client.begin()
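            # record the URL in failed_url instead so the article can be retried later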
            self.insertFailed(msg)
            LOGGER.error("insert into mongo/successed_url error: %s"  %(msg["url"]))
            LOGGER.error(traceback.format_exc())
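
    # insertFailed is called from insertSuccess and crawlArticle but its body is not
    # part of this listing.  The following is only a minimal sketch of what it
    # plausibly does, assuming failed_url has the same columns as successed_url;
    # it is a hypothetical reconstruction, not the original implementation.
    def insertFailed(self, msg):
        """
        Sketch: record a URL that could not be crawled so it can be retried later.
        """
        try:
            # hypothetical: mirrors the successed_url insert in insertSuccess;
            # transaction handling (begin/commit) follows the same pattern
            self.mysql_client.insertOne("insert into failed_url(url, tag, sub_tag, version, create_time) values(%s, %s, %s, %s, %s)",
                                        (msg["url"], msg["tag"], msg["sub_tag"], VERSION, msg["create_time"]))
            self.mysql_client.end("commit")
        except Exception as e:
            self.mysql_client.end("rollback")
            LOGGER.error(traceback.format_exc())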
    def crawlArticle(self, msg):
        """
        Crawl the article referenced by msg with a headless PhantomJS browser.
        """
        
        url = msg["url"]
        try:
            
            driver = webdriver.PhantomJS(PHANTOMJS_PATH)
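            # a fresh PhantomJS instance per article; page loads longer than 10 seconds raise TimeoutException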
            driver.set_page_load_timeout(10)
            article = self.mysql_client.getOne("select * from successed_url where url=%s", (msg["url"], ))
            if article != False:
                LOGGER.info("repeat crawler the article give up save: %s", msg["url"])
                return

            LOGGER.debug("start extractor from %s" %(url, ))
            driver.get(url)
            try:
                # scroll to the bottom and back to the top so lazily loaded content is rendered
                js = "var q=document.documentElement.scrollTop=10000"
                driver.execute_script(js)
                js = "var q=document.documentElement.scrollTop=0"
                driver.execute_script(js)
                articles = self.pharseContext(driver)
                msg["text"] = articles
                self.insertSuccess(msg)

            except Exception as e:
                LOGGER.error(traceback.format_exc())
                LOGGER.error("url: %s" %(msg["url"],))
                self.insertFailed(msg)
  
        except TimeoutException as e:
            # the page load timed out but may have rendered enough content, so scroll and try to extract anyway
            try:
                js = "var q=document.documentElement.scrollTop=10000"
                driver.execute_script(js)
                js = "var q=document.documentElement.scrollTop=0"
                driver.execute_script(js)
#                 title = driver.find_element_by_css_selector("h1[id=\"h1title\"]").text
                articles = self.pharseContext(driver)
                msg["text"] = articles
                self.insertSuccess(msg)
                
            except Exception as e:
                self.insertFailed(msg)
                LOGGER.error(traceback.format_exc())
                LOGGER.error("url: %s" %(msg["url"], ))
        finally:
            # shut down the PhantomJS process on every path, not only when the timeout fallback also fails
            try:
                driver.quit()
            except Exception:
                pass
    def __init__(self, pidfile, stdin=os.devnull, stdout=os.devnull, stderr=os.devnull):
        super(Crawler, self).__init__(pidfile, stdin, stdout, stderr)

    def run(self):
        try:
            LOGGING = {'version': 1}
            QUEUE_NAME = "news_article"
            LOGGER.info("start the news crawler")
            threadCount = CRAWLER_THREAD_COUNT
            messageHandlerList = []
            workThreadList = []
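            # start CRAWLER_THREAD_COUNT worker threads, each consuming messages from the news_article queue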
            for _ in range(threadCount):
                messageHandler = CrawlerMessageHandler(crawlerMapper)
                messageHandler.set_inputmessage(QUEUE_NAME)
                messageHandlerList.append(messageHandler)
                workerThread = threading.Thread(target=messageHandler.start, args=(LOGGING, ))
                workerThread.start()
                workThreadList.append(workerThread)
            
            for worker in workThreadList:
                worker.join()
                
        except Exception as e:
            LOGGER.error(traceback.format_exc())
        finally:
            LOGGER.info("end the news crawler")

if __name__ == "__main__":
#     daemon_main(Crawler, 'c', sys.argv)
    crawler = Crawler("./")
    crawler.run()
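
# For reference: the msg dict consumed by crawlArticle/insertSuccess needs at least
# the keys accessed above ("text" is added by crawlArticle itself).  The producer
# side is not part of this listing, so the values below are only an illustrative,
# hypothetical example.
#
# EXAMPLE_MSG = {
#     "url": "http://example.com/news/1.html",
#     "title": "example title",
#     "tag": "news",
#     "sub_tag": "tech",
#     "create_time": "2016-01-01 00:00:00",
# }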