def save_item(self, item):
    """Persist one crawled item to the database.

    Opens a fresh session, inserts ``item`` as an ``Item`` row and commits.
    On any failure the insert is rolled back and a warning is logged; the
    item's ``link`` value identifies the failing page in the log.

    Args:
        item: dict of ``Item`` column values; assumed to contain a 'link'
            key used in the error message — TODO confirm against callers.
    """
    session = Session()
    try:
        session.add(Item(**item))
        session.commit()
    except Exception as e:
        logger.warning('页面 {} 的数据写入数据库出错,错误原因{}'.format(item['link'], e))
        session.rollback()
    finally:
        # Bug fix: close() was previously unconditional code after the
        # try/except, so it was skipped if rollback itself raised; a
        # finally guarantees the connection is always returned.
        session.close()
def main():
    """Configure file logging, then crawl each contest in the list.

    Each contest gets its own session: committed on success, rolled back
    (and the exception re-raised) on failure, and always closed.
    """
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler('../log/crawler.log')
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    contest_list = ['abc002']
    with Crawler() as crawler:
        scraper = Scraper(crawler)
        for contest in contest_list:
            db_session = Session()
            try:
                # scraper.crawl_results(contest, db_session)
                scraper.crawl_contest_by_id(contest, db_session)
            except Exception:
                db_session.rollback()
                # Bare raise preserves the original traceback
                # (``raise e`` re-raised from this frame).
                raise
            else:
                # Bug fix: commit() used to live in ``finally``, so it
                # ran even after a rollback on the exception path.
                # Commit only when the crawl succeeded.
                db_session.commit()
            finally:
                db_session.close()
from __future__ import print_function import sys from model import Session, User, Topic session = Session() # Add a user to a topic. t1 = Topic("and now for something completely different") t1.author = User("xyzzy") # Add some topics to a user. u2 = User("plugh") u2.topics = [ Topic("my hovercraft is full of eels"), Topic("he is pining for the fjords"), ] try: session.add(t1) session.add(u2) session.commit() except Exception as e: print("Failed:", e) session.rollback() sys.exit(1)