Example #1
    def get_data(self):
        """Return every document from the quote collection."""
        r = []
        try:
            for d in self.db.quote.find():
                r.append(d)
            return r
        except Exception as e:
            self.write_log('fetchingDataError', e)
            Logger().error(str(e))
            return r  # return whatever was collected before the failure
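These methods assume a DAO class that holds a PyMongo database handle in self.db and a write_log helper. The source does not show the constructor; a minimal sketch of one, where the host, port, and database name are placeholder assumptions rather than values from the source:

from pymongo import MongoClient

class DAO:
    def __init__(self):
        # host, port, and database name are assumptions, not from the source
        self.client = MongoClient('localhost', 27017)
        self.db = self.client['quotes_db']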
Example #2
    def save(self, dic):
        """Insert one quote document; abort the program if the write fails."""
        try:
            # insert_one() replaces insert(), which was deprecated in PyMongo 3.0
            self.db.quote.insert_one(dic)
            print('data from ' + dic['author'] + ' in ' + dic['language'] +
                  ' added successfully!')
        except Exception as e:
            self.write_log('insertionError', e)
            Logger().error(str(e))
            sys.exit(1)  # requires `import sys` at module level
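A hypothetical call to save(), showing the dictionary keys the method reads back; the author, language, and 'quote' key are placeholders, not taken from the source:

dao = DAO()
dao.save({'author': 'Albert Camus', 'language': 'fr', 'quote': '...'})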
Example #3
import sys

import requests
from bs4 import BeautifulSoup

# DAO and Logger come from the surrounding project and are not shown here.


def scrap(url):
    """Fetch a page and return its parsed BeautifulSoup tree; exit on HTTP errors."""
    r = requests.get(url)
    if r.status_code == 200:
        soup = BeautifulSoup(r.text, "html.parser")
        return soup
    else:
        d = DAO()
        d.write_log('crawlerError', str(r.status_code) + ' ' + url)
        Logger().error(str(r.status_code) + ' ' + url)
        print('registered connection error!', str(r.status_code) + ' ' + url)
        sys.exit(1)
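A hypothetical usage of scrap() with a placeholder URL; note that on a non-200 response the function exits the process instead of returning:

soup = scrap('https://example.com/quotes')  # placeholder URL
title = soup.find('title')
print(title.get_text() if title else 'no title found')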
Example #4
    def __get_log(self):
        """Return every log document; an empty collection simply yields []."""
        r = []
        try:
            for d in self.db.log.find():
                r.append(d)
            return r
        except Exception as e:
            self.write_log('error', e)
            Logger().error(str(e))
            return []
Example #5
    def url_set_up(self, author, language):
        """Scrape quotes for an author unless they are already stored."""
        if len(self.d.check_author(author)) > 0:
            status = 'All quotes from ' + author + ' in ' + language + ' are up to date!'
            self.d.write_log('status', status)
            Logger().info(status)
            print("Log registered!")
            return False
        else:
            print('****** formatting url ****')
            addr = self.url + author
            print(addr)
            self.__fetch_fr(scrap(addr), author)
            return True
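url_set_up() returns False when quotes for the author are already stored (only logging a status message) and True after scraping fresh data. A hypothetical call, assuming the method lives on a crawler class that keeps a DAO in self.d and a base URL in self.url:

crawler = Crawler()  # class name is an assumption
if crawler.url_set_up('Albert Camus', 'fr'):
    print('new quotes fetched')
else:
    print('quotes already up to date')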